In this project, we are interested in the classification of images into two classes. The images in this study are real-world images that are complex to classify: they do not all have the same size or shape, and they are not monochrome or even centered. In addition, some images contain humans and varied backgrounds, which complicates the classification task for the models.
The database consists of 3000 image files divided into two folders: a cats folder made up of 1500 cat images and a dogs folder made up of 1500 dog images.
Task #1: Import Libraries and Data Preprocessing
Task #2: Visualize Examples
Task #3: Create model from scratch
Task #4: Data Preprocessing : Build Image Data Generator
Task #5: Train Model
Task #6: Visualization of the Accuracy and Loss
Task #7: Data Augmentation and Re-training of the model
Task #8: Transfer Learning with Data Augmentation
Task #9: Assess the performance of the trained model
# Standard-library imports for file handling and the train/test split below.
import os
import random
import shutil
from shutil import copyfile  # NOTE(review): unused — the code calls shutil.copyfile instead
# Source folders holding the raw, unsplit images.
CAT_SOURCE_DIR = os.path.join('./cats/')
DOG_SOURCE_DIR = os.path.join('./dogs/')
# Sanity check: each source folder should contain 1500 images.
print('Total cats images :',len(os.listdir(CAT_SOURCE_DIR)))
print('Total dogs images :', len(os.listdir(DOG_SOURCE_DIR)))
Total cats images : 1500 Total dogs images : 1500
# Build the training/testing directory tree.
# os.makedirs creates the intermediate './training/' and './testing/'
# parents automatically, and exist_ok=True makes re-running this cell a
# no-op — unlike the previous broad `except OSError: pass`, which also
# silently hid unrelated failures (e.g. permission errors).
for directory in ('./training/cats/', './training/dogs/',
                  './testing/cats/', './testing/dogs/'):
    os.makedirs(directory, exist_ok=True)
# Directory layout for the split.  os.path.join with a single argument is
# a no-op, so the parent directories are plain literals; the class
# subdirectories are joined from them so the paths stay consistent if the
# roots ever change.
# Directories for training data
TRAINING_DIR = './training/'
# Directories for testing data
TESTING_DIR = './testing/'
# Subdirectories for training data
TRAINING_CATS_DIR = os.path.join(TRAINING_DIR, 'cats/')
TRAINING_DOGS_DIR = os.path.join(TRAINING_DIR, 'dogs/')
# Subdirectories for testing data
TESTING_CATS_DIR = os.path.join(TESTING_DIR, 'cats/')
TESTING_DOGS_DIR = os.path.join(TESTING_DIR, 'dogs/')
def split_data(SOURCE, TRAINING, TESTING, SPLIT_SIZE = 0.8):
    """
    Copy a random split of the files in SOURCE into two directories.

    - SOURCE : directory containing the files (cats or dogs)
    - TRAINING : directory that receives the first SPLIT_SIZE fraction of files
    - TESTING : directory that receives the remaining files
    - SPLIT_SIZE : float between 0 and 1, the fraction copied into TRAINING
    """
    names = os.listdir(SOURCE)
    # random.sample with k == len(names) returns a shuffled copy.
    shuffled = random.sample(names, len(names))
    cutoff = int(len(names) * SPLIT_SIZE)
    # Copy the first `cutoff` files to TRAINING, the rest to TESTING.
    for destination, chunk in ((TRAINING, shuffled[:cutoff]),
                               (TESTING, shuffled[cutoff:])):
        for name in chunk:
            shutil.copyfile(os.path.join(SOURCE, name),
                            os.path.join(destination, name))
# We split data: 90% of each class goes to training, 10% to testing
# (1350 / 150 images per class, given 1500 source images each).
split_data(CAT_SOURCE_DIR, TRAINING_CATS_DIR, TESTING_CATS_DIR, SPLIT_SIZE = 0.9)
split_data(DOG_SOURCE_DIR, TRAINING_DOGS_DIR, TESTING_DOGS_DIR, SPLIT_SIZE = 0.9)
# Verify the resulting per-class counts.
print('Total training cats :', len(os.listdir(TRAINING_CATS_DIR)))
print('Total training dogs :', len(os.listdir(TRAINING_DOGS_DIR)))
print('Total testing cats :', len(os.listdir(TESTING_CATS_DIR)))
print('Total testing dogs :', len(os.listdir(TESTING_DOGS_DIR)))
Total training cats : 1350 Total training dogs : 1350 Total testing cats : 150 Total testing dogs : 150
os.listdir(TRAINING_DIR)
['cats', 'dogs']
os.listdir(os.path.join(TRAINING_DIR, 'dogs'))[ : 10]
['dog.0.jpg', 'dog.1.jpg', 'dog.100.jpg', 'dog.101.jpg', 'dog.102.jpg', 'dog.103.jpg', 'dog.104.jpg', 'dog.105.jpg', 'dog.107.jpg', 'dog.11.jpg']
os.listdir(os.path.join(TRAINING_DIR, 'cats'))[ : 10]
['cat.0.jpg', 'cat.1.jpg', 'cat.10.jpg', 'cat.100.jpg', 'cat.101.jpg', 'cat.102.jpg', 'cat.103.jpg', 'cat.104.jpg', 'cat.105.jpg', 'cat.106.jpg']
# Check the number of images in the dataset
# Build a flat list of training image paths and a parallel list of labels
# ('cats' / 'dogs'), the label being the subdirectory name each file sits in.
train_file_path = []
label_names = []
# os.listdir returns the list of files in the folder, in this case image class names
for i in os.listdir(TRAINING_DIR):
train_class = os.listdir(os.path.join(TRAINING_DIR, i))
for j in train_class:
img = os.path.join(TRAINING_DIR, i, j)
train_file_path.append(img)
label_names.append(i)
print('Number of train images : {} \n'.format(len(train_file_path)))
Number of train images : 2700
train_file_path[ :10]
['./training/cats\\cat.0.jpg', './training/cats\\cat.1.jpg', './training/cats\\cat.10.jpg', './training/cats\\cat.100.jpg', './training/cats\\cat.101.jpg', './training/cats\\cat.102.jpg', './training/cats\\cat.103.jpg', './training/cats\\cat.104.jpg', './training/cats\\cat.105.jpg', './training/cats\\cat.106.jpg']
label_names[ :10]
['cats', 'cats', 'cats', 'cats', 'cats', 'cats', 'cats', 'cats', 'cats', 'cats']
# Create Dataframe
# Pandas DataFrame pairing each image path with its class label,
# used for inspection and the count plot below.
import pandas as pd
df = pd.DataFrame({'Images': train_file_path,'Labels': label_names})
df
| Images | Labels | |
|---|---|---|
| 0 | ./training/cats\cat.0.jpg | cats |
| 1 | ./training/cats\cat.1.jpg | cats |
| 2 | ./training/cats\cat.10.jpg | cats |
| 3 | ./training/cats\cat.100.jpg | cats |
| 4 | ./training/cats\cat.102.jpg | cats |
| ... | ... | ... |
| 2695 | ./training/dogs\dog.994.jpg | dogs |
| 2696 | ./training/dogs\dog.995.jpg | dogs |
| 2697 | ./training/dogs\dog.996.jpg | dogs |
| 2698 | ./training/dogs\dog.998.jpg | dogs |
| 2699 | ./training/dogs\dog.999.jpg | dogs |
2700 rows × 2 columns
# Bar plot of the class distribution — both classes should show 1350 images.
import seaborn as sns
sns.countplot(x = label_names)
<AxesSubplot:ylabel='count'>
%matplotlib inline
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np
# Number of images sampled per class for the preview grid (5x4 = 20 total).
pic_index = 10
# Shuffle the file lists so a different sample is shown on each run.
cats = np.random.permutation(os.listdir(TRAINING_CATS_DIR))
dogs = np.random.permutation(os.listdir(TRAINING_DOGS_DIR))
next_cats = [os.path.join(TRAINING_CATS_DIR, fname) for fname in cats[0:pic_index]]
next_dogs = [os.path.join(TRAINING_DOGS_DIR, fname) for fname in dogs[0:pic_index]]
plt.figure(figsize = (30, 30))
# Mix the 10 cats and 10 dogs together before filling the grid.
next_img = np.random.permutation(next_cats + next_dogs)
for i, img_path in enumerate(next_img):
#print(img_path)
plt.subplot(5, 4, i+1)
img = mpimg.imread(img_path)
plt.imshow(img)
plt.xticks([])
plt.yticks([])
# [15:] slices off the directory prefix so only the file name is labelled.
plt.xlabel(next_img[i][15:], color = 'green', fontsize=20)
plt.show()
import tensorflow as tf
from tensorflow.keras.layers import Input, Dense, Conv2D, MaxPooling2D, BatchNormalization
from tensorflow.keras.layers import Dropout, Flatten, Activation
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint
def create_model():
    """
    Build and compile a small CNN for binary cat/dog classification.

    Architecture: four Conv2D(3x3, ReLU, 'valid') + MaxPooling2D(2x2)
    stages with 32, 32, 64, 64 filters on 150x150x3 inputs, followed by
    a 512-unit ReLU dense layer and a single sigmoid output.

    Returns:
        A compiled tf.keras Sequential model (RMSprop optimizer,
        binary cross-entropy loss, accuracy metric).
    """
    def add_conv_block(model, num_filters):
        # One conv/pool stage; 'valid' padding trims 2 px per convolution.
        model.add(Conv2D(filters = num_filters, kernel_size = (3,3), strides = (1, 1), activation='relu', padding = 'valid'))
        model.add(MaxPooling2D(pool_size = (2, 2)))
        return model

    model = tf.keras.models.Sequential()
    model.add(Input(shape = (150, 150, 3)))
    model = add_conv_block(model, 32)
    model = add_conv_block(model, 32)
    model = add_conv_block(model, 64)
    model = add_conv_block(model, 64)
    model.add(Flatten())
    model.add(Dense(units = 512, activation = 'relu'))
    # Single sigmoid unit paired with binary_crossentropy below.
    model.add(Dense(units = 1, activation = 'sigmoid'))
    # Compile the model.  `lr` is deprecated in tf.keras optimizers; the
    # supported keyword is `learning_rate`.
    model.compile(optimizer=RMSprop(learning_rate=1e-4), loss='binary_crossentropy', metrics = ['accuracy'])
    return model
# Instantiate the CNN and print its layer-by-layer summary.
model = create_model()
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= conv2d (Conv2D) (None, 148, 148, 32) 896 _________________________________________________________________ max_pooling2d (MaxPooling2D) (None, 74, 74, 32) 0 _________________________________________________________________ conv2d_1 (Conv2D) (None, 72, 72, 32) 9248 _________________________________________________________________ max_pooling2d_1 (MaxPooling2 (None, 36, 36, 32) 0 _________________________________________________________________ conv2d_2 (Conv2D) (None, 34, 34, 64) 18496 _________________________________________________________________ max_pooling2d_2 (MaxPooling2 (None, 17, 17, 64) 0 _________________________________________________________________ conv2d_3 (Conv2D) (None, 15, 15, 64) 36928 _________________________________________________________________ max_pooling2d_3 (MaxPooling2 (None, 7, 7, 64) 0 _________________________________________________________________ flatten (Flatten) (None, 3136) 0 _________________________________________________________________ dense (Dense) (None, 512) 1606144 _________________________________________________________________ dense_1 (Dense) (None, 1) 513 ================================================================= Total params: 1,672,225 Trainable params: 1,672,225 Non-trainable params: 0 _________________________________________________________________
#using early stopping to exit training if validation loss is not decreasing even after certain epochs (patience)
earlystopping = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=15)
#save the best model with lower validation loss
#checkpointer = ModelCheckpoint(filepath="weights_.hdf5", verbose=1, save_best_only=True)
# Using Callback to control training
class myCallback(tf.keras.callbacks.Callback):
    """Stop training as soon as validation accuracy reaches 98%."""

    def on_epoch_end(self, epoch, logs=None):
        # Keras may pass logs=None, and 'val_accuracy' is absent when no
        # validation data is supplied; the original `log={}` mutable default
        # plus `log.get(...) >= 0.98` would raise TypeError on None.
        val_acc = (logs or {}).get('val_accuracy')
        if val_acc is not None and val_acc >= 0.98:
            print("\nReached {}% val_accuracy so cancelling training!".format(round(100 * val_acc, 2)))
            self.model.stop_training = True
# callback instance
# Shared by both training runs below.
callbacks = myCallback()
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Create run-time augmentation on training and test dataset
# For training datagenerator, we add normalization and validation_split
# (0.1112 of the 2700 training images ~= 300 held out for validation).
train_datagen = ImageDataGenerator(rescale = 1 / 255.0,
validation_split = 0.1112)
# For test datagenerator, we only normalize the data.
test_datagen = ImageDataGenerator(rescale=1 / 255.0)
# Flow training images in batches of 20 using train_datagen generator
training_generator = train_datagen.flow_from_directory(
TRAINING_DIR, # This is the source directory for training images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
subset = 'training',
class_mode = 'binary') # Since we use binary_crossentropy loss, we need binary labels
# Flow validation images in batches of 20 from the held-out split
validation_generator = train_datagen.flow_from_directory(
TRAINING_DIR, # This is the source directory for training images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
subset = 'validation',
class_mode ='binary') # Since we use binary_crossentropy loss, we need binary labels
# Flow testing images in batches of 20 using test_datagen generator
testing_generator = test_datagen.flow_from_directory(
TESTING_DIR, # This is the source directory for testing images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
class_mode ='binary') # Since we use binary_crossentropy loss, we need binary labels
Found 2400 images belonging to 2 classes. Found 300 images belonging to 2 classes. Found 300 images belonging to 2 classes.
# Train the scratch CNN.  With batch_size 20, n // 20 steps covers each
# subset exactly once per epoch; earlystopping and the 98% val_accuracy
# callback can both end training early.
history = model.fit(
training_generator,
steps_per_epoch = training_generator.n // 20,
epochs = 30,
validation_data = validation_generator,
validation_steps = validation_generator.n // 20,
callbacks = [callbacks, earlystopping],
verbose = 2
)
Epoch 1/30 120/120 - 57s - loss: 0.6848 - accuracy: 0.5508 - val_loss: 0.7097 - val_accuracy: 0.5133 Epoch 2/30 120/120 - 61s - loss: 0.6520 - accuracy: 0.6067 - val_loss: 0.6342 - val_accuracy: 0.6467 Epoch 3/30 120/120 - 60s - loss: 0.6204 - accuracy: 0.6642 - val_loss: 0.6174 - val_accuracy: 0.6500 Epoch 4/30 120/120 - 61s - loss: 0.5857 - accuracy: 0.6929 - val_loss: 0.5845 - val_accuracy: 0.6767 Epoch 5/30 120/120 - 64s - loss: 0.5593 - accuracy: 0.7092 - val_loss: 0.5583 - val_accuracy: 0.7200 Epoch 6/30 120/120 - 62s - loss: 0.5362 - accuracy: 0.7308 - val_loss: 0.5463 - val_accuracy: 0.7167 Epoch 7/30 120/120 - 66s - loss: 0.5214 - accuracy: 0.7375 - val_loss: 0.5342 - val_accuracy: 0.7467 Epoch 8/30 120/120 - 61s - loss: 0.5005 - accuracy: 0.7583 - val_loss: 0.5454 - val_accuracy: 0.7533 Epoch 9/30 120/120 - 63s - loss: 0.4813 - accuracy: 0.7696 - val_loss: 0.5121 - val_accuracy: 0.7300 Epoch 10/30 120/120 - 65s - loss: 0.4698 - accuracy: 0.7721 - val_loss: 0.5576 - val_accuracy: 0.6900 Epoch 11/30 120/120 - 66s - loss: 0.4499 - accuracy: 0.7912 - val_loss: 0.4965 - val_accuracy: 0.7767 Epoch 12/30 120/120 - 62s - loss: 0.4252 - accuracy: 0.7946 - val_loss: 0.4822 - val_accuracy: 0.7700 Epoch 13/30 120/120 - 60s - loss: 0.4109 - accuracy: 0.8121 - val_loss: 0.4736 - val_accuracy: 0.7700 Epoch 14/30 120/120 - 61s - loss: 0.3953 - accuracy: 0.8263 - val_loss: 0.4543 - val_accuracy: 0.8067 Epoch 15/30 120/120 - 66s - loss: 0.3756 - accuracy: 0.8354 - val_loss: 0.4780 - val_accuracy: 0.7867 Epoch 16/30 120/120 - 67s - loss: 0.3582 - accuracy: 0.8450 - val_loss: 0.4677 - val_accuracy: 0.7800 Epoch 17/30 120/120 - 64s - loss: 0.3441 - accuracy: 0.8454 - val_loss: 0.4839 - val_accuracy: 0.7667 Epoch 18/30 120/120 - 69s - loss: 0.3259 - accuracy: 0.8621 - val_loss: 0.4720 - val_accuracy: 0.7900 Epoch 19/30 120/120 - 72s - loss: 0.3066 - accuracy: 0.8679 - val_loss: 0.4603 - val_accuracy: 0.8000 Epoch 20/30 120/120 - 70s - loss: 0.2861 - accuracy: 0.8858 - 
val_loss: 0.4676 - val_accuracy: 0.7933 Epoch 21/30 120/120 - 72s - loss: 0.2737 - accuracy: 0.8913 - val_loss: 0.4659 - val_accuracy: 0.8033 Epoch 22/30 120/120 - 74s - loss: 0.2628 - accuracy: 0.8933 - val_loss: 0.4670 - val_accuracy: 0.7833 Epoch 23/30 120/120 - 66s - loss: 0.2501 - accuracy: 0.8988 - val_loss: 0.4748 - val_accuracy: 0.7967 Epoch 24/30 120/120 - 68s - loss: 0.2296 - accuracy: 0.9129 - val_loss: 0.4837 - val_accuracy: 0.7900 Epoch 25/30 120/120 - 67s - loss: 0.2133 - accuracy: 0.9233 - val_loss: 0.4826 - val_accuracy: 0.7933 Epoch 26/30 120/120 - 68s - loss: 0.1984 - accuracy: 0.9242 - val_loss: 0.6656 - val_accuracy: 0.7267 Epoch 27/30 120/120 - 70s - loss: 0.1792 - accuracy: 0.9333 - val_loss: 0.4998 - val_accuracy: 0.8000 Epoch 28/30 120/120 - 70s - loss: 0.1716 - accuracy: 0.9371 - val_loss: 0.5013 - val_accuracy: 0.7933 Epoch 29/30 120/120 - 71s - loss: 0.1556 - accuracy: 0.9413 - val_loss: 0.6092 - val_accuracy: 0.7467 Epoch 00029: early stopping
def graph_plot(history):
    """
    Plot training and validation accuracy per epoch, then training and
    validation loss, side by side in one figure.

    Argument :
    - history : the History object returned by model.fit
    """
    import matplotlib.pyplot as plt

    # Pull the four per-epoch metric series recorded during training.
    hist = history.history
    epochs = range(len(hist['accuracy']))

    plt.figure(figsize = (15, 4))
    # (subplot position, y-label, train series, val series,
    #  train legend, val legend, panel title)
    panels = (
        (1, "accuracy", hist['accuracy'], hist['val_accuracy'],
         'Training accuracy', 'Validation accuracy',
         'Training and validation accuracy'),
        (2, "loss", hist['loss'], hist['val_loss'],
         'Training loss', 'Validation loss',
         'Training and validation loss'),
    )
    for position, ylab, train_series, val_series, train_label, val_label, title in panels:
        plt.subplot(1, 2, position)
        plt.plot(epochs, train_series, 'bo', label=train_label)
        plt.plot(epochs, val_series, 'b', label=val_label)
        plt.xlabel("numbers of epochs")
        plt.ylabel(ylab)
        plt.grid(linestyle='--')
        plt.title(title)
        plt.legend()
    plt.show()
# Accuracy/loss curves for the first (non-augmented) training run.
graph_plot(history)
# Create run-time augmentation on training and test dataset
# For training datagenerator, we add normalization and validation_split
# plus random rotations, shifts, shears, zooms and horizontal flips.
train_datagen = ImageDataGenerator(rescale = 1 / 255.0,
rotation_range = 45,
width_shift_range = 0.3,
height_shift_range = 0.3,
shear_range = 0.4,
zoom_range = 0.4,
horizontal_flip = True,
fill_mode = 'nearest',
validation_split = 0.1112)
# For test datagenerator, we only normalize the data.
test_datagen = ImageDataGenerator(rescale=1 / 255.0)
# Flow training images in batches of 20 using train_datagen generator
training_generator = train_datagen.flow_from_directory(
TRAINING_DIR, # This is the source directory for training images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
subset = 'training',
class_mode = 'binary') # Since we use binary_crossentropy loss, we need binary labels
# Flow validation images in batches of 20 from the held-out split
validation_generator = train_datagen.flow_from_directory(
TRAINING_DIR, # This is the source directory for training images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
subset = 'validation',
class_mode ='binary') # Since we use binary_crossentropy loss, we need binary labels
# Flow testing images in batches of 20 using test_datagen generator
testing_generator = test_datagen.flow_from_directory(
TESTING_DIR, # This is the source directory for testing images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
class_mode ='binary') # Since we use binary_crossentropy loss, we need binary labels
Found 2400 images belonging to 2 classes. Found 300 images belonging to 2 classes. Found 300 images belonging to 2 classes.
# Continue training the SAME model (weights carried over from the first
# run) on the augmented data stream.
history2 = model.fit(
training_generator,
steps_per_epoch = training_generator.n // 20,
epochs = 20,
validation_data = validation_generator,
validation_steps = validation_generator.n // 20,
callbacks = [callbacks, earlystopping],
verbose = 2
)
Epoch 1/20 120/120 - 67s - loss: 0.3932 - accuracy: 0.8242 - val_loss: 0.3783 - val_accuracy: 0.8467 Epoch 2/20 120/120 - 69s - loss: 0.3696 - accuracy: 0.8354 - val_loss: 0.3648 - val_accuracy: 0.8433 Epoch 3/20 120/120 - 69s - loss: 0.3998 - accuracy: 0.8200 - val_loss: 0.4152 - val_accuracy: 0.8300 Epoch 4/20 120/120 - 71s - loss: 0.3716 - accuracy: 0.8288 - val_loss: 0.3748 - val_accuracy: 0.8500 Epoch 5/20 120/120 - 67s - loss: 0.3964 - accuracy: 0.8325 - val_loss: 0.4517 - val_accuracy: 0.8100 Epoch 6/20 120/120 - 70s - loss: 0.3796 - accuracy: 0.8313 - val_loss: 0.3790 - val_accuracy: 0.8400 Epoch 7/20 120/120 - 67s - loss: 0.3723 - accuracy: 0.8388 - val_loss: 0.4969 - val_accuracy: 0.7833 Epoch 8/20 120/120 - 72s - loss: 0.3803 - accuracy: 0.8304 - val_loss: 0.3611 - val_accuracy: 0.8400 Epoch 9/20 120/120 - 70s - loss: 0.3778 - accuracy: 0.8346 - val_loss: 0.4146 - val_accuracy: 0.8167 Epoch 10/20 120/120 - 68s - loss: 0.3762 - accuracy: 0.8375 - val_loss: 0.5552 - val_accuracy: 0.7533 Epoch 11/20 120/120 - 70s - loss: 0.3713 - accuracy: 0.8404 - val_loss: 0.3857 - val_accuracy: 0.8300 Epoch 12/20 120/120 - 67s - loss: 0.3950 - accuracy: 0.8221 - val_loss: 0.4406 - val_accuracy: 0.8267 Epoch 13/20 120/120 - 71s - loss: 0.3896 - accuracy: 0.8200 - val_loss: 0.3851 - val_accuracy: 0.8333 Epoch 14/20 120/120 - 68s - loss: 0.3832 - accuracy: 0.8342 - val_loss: 0.4732 - val_accuracy: 0.7700 Epoch 15/20 120/120 - 69s - loss: 0.3837 - accuracy: 0.8313 - val_loss: 0.4318 - val_accuracy: 0.8000 Epoch 16/20 120/120 - 70s - loss: 0.3830 - accuracy: 0.8329 - val_loss: 0.4189 - val_accuracy: 0.8100 Epoch 17/20 120/120 - 68s - loss: 0.3742 - accuracy: 0.8300 - val_loss: 0.4369 - val_accuracy: 0.8067 Epoch 18/20 120/120 - 71s - loss: 0.3796 - accuracy: 0.8379 - val_loss: 0.3325 - val_accuracy: 0.8533 Epoch 19/20 120/120 - 67s - loss: 0.3774 - accuracy: 0.8350 - val_loss: 0.4061 - val_accuracy: 0.8233 Epoch 20/20 120/120 - 71s - loss: 0.4035 - accuracy: 0.8225 - 
val_loss: 0.4578 - val_accuracy: 0.7867
# Accuracy/loss curves for the augmented training run.
graph_plot(history2)
Use transfer learning to improve the performance of the model. We use the weights of an InceptionV3 model pre-trained on the ImageNet database for the transfer learning.
from tensorflow.keras import layers, Model
# Import the inception model
from tensorflow.keras.applications.inception_v3 import InceptionV3
# Local copy of the ImageNet weights without the top classification layer.
weights_file = "./inception_v3_weights_tf_dim_ordering_tf_kernels_notop.h5"
# Create an instance of the inception model (weights=None: loaded from file below)
pre_trained_model = InceptionV3(
    include_top = False,
    input_shape = (150, 150, 3),
    weights = None
)
# Loading weights
pre_trained_model.load_weights(weights_file)
# Summary of the model
pre_trained_model.summary()
# Make all the layers in the pre-trained model non-trainable (freeze the
# base so only the new head learns).  The original code set
# trainable = True here, contradicting this very comment and fine-tuning
# the whole Inception base instead of freezing it.
for layer in pre_trained_model.layers:
    layer.trainable = False
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_3 (InputLayer) [(None, 150, 150, 3) 0
__________________________________________________________________________________________________
conv2d_98 (Conv2D) (None, 74, 74, 32) 864 input_3[0][0]
__________________________________________________________________________________________________
batch_normalization_94 (BatchNo (None, 74, 74, 32) 96 conv2d_98[0][0]
__________________________________________________________________________________________________
activation_94 (Activation) (None, 74, 74, 32) 0 batch_normalization_94[0][0]
__________________________________________________________________________________________________
conv2d_99 (Conv2D) (None, 72, 72, 32) 9216 activation_94[0][0]
__________________________________________________________________________________________________
batch_normalization_95 (BatchNo (None, 72, 72, 32) 96 conv2d_99[0][0]
__________________________________________________________________________________________________
activation_95 (Activation) (None, 72, 72, 32) 0 batch_normalization_95[0][0]
__________________________________________________________________________________________________
conv2d_100 (Conv2D) (None, 72, 72, 64) 18432 activation_95[0][0]
__________________________________________________________________________________________________
batch_normalization_96 (BatchNo (None, 72, 72, 64) 192 conv2d_100[0][0]
__________________________________________________________________________________________________
activation_96 (Activation) (None, 72, 72, 64) 0 batch_normalization_96[0][0]
__________________________________________________________________________________________________
max_pooling2d_8 (MaxPooling2D) (None, 35, 35, 64) 0 activation_96[0][0]
__________________________________________________________________________________________________
conv2d_101 (Conv2D) (None, 35, 35, 80) 5120 max_pooling2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_97 (BatchNo (None, 35, 35, 80) 240 conv2d_101[0][0]
__________________________________________________________________________________________________
activation_97 (Activation) (None, 35, 35, 80) 0 batch_normalization_97[0][0]
__________________________________________________________________________________________________
conv2d_102 (Conv2D) (None, 33, 33, 192) 138240 activation_97[0][0]
__________________________________________________________________________________________________
batch_normalization_98 (BatchNo (None, 33, 33, 192) 576 conv2d_102[0][0]
__________________________________________________________________________________________________
activation_98 (Activation) (None, 33, 33, 192) 0 batch_normalization_98[0][0]
__________________________________________________________________________________________________
max_pooling2d_9 (MaxPooling2D) (None, 16, 16, 192) 0 activation_98[0][0]
__________________________________________________________________________________________________
conv2d_106 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
batch_normalization_102 (BatchN (None, 16, 16, 64) 192 conv2d_106[0][0]
__________________________________________________________________________________________________
activation_102 (Activation) (None, 16, 16, 64) 0 batch_normalization_102[0][0]
__________________________________________________________________________________________________
conv2d_104 (Conv2D) (None, 16, 16, 48) 9216 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
conv2d_107 (Conv2D) (None, 16, 16, 96) 55296 activation_102[0][0]
__________________________________________________________________________________________________
batch_normalization_100 (BatchN (None, 16, 16, 48) 144 conv2d_104[0][0]
__________________________________________________________________________________________________
batch_normalization_103 (BatchN (None, 16, 16, 96) 288 conv2d_107[0][0]
__________________________________________________________________________________________________
activation_100 (Activation) (None, 16, 16, 48) 0 batch_normalization_100[0][0]
__________________________________________________________________________________________________
activation_103 (Activation) (None, 16, 16, 96) 0 batch_normalization_103[0][0]
__________________________________________________________________________________________________
average_pooling2d_9 (AveragePoo (None, 16, 16, 192) 0 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
conv2d_103 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
conv2d_105 (Conv2D) (None, 16, 16, 64) 76800 activation_100[0][0]
__________________________________________________________________________________________________
conv2d_108 (Conv2D) (None, 16, 16, 96) 82944 activation_103[0][0]
__________________________________________________________________________________________________
conv2d_109 (Conv2D) (None, 16, 16, 32) 6144 average_pooling2d_9[0][0]
__________________________________________________________________________________________________
batch_normalization_99 (BatchNo (None, 16, 16, 64) 192 conv2d_103[0][0]
__________________________________________________________________________________________________
batch_normalization_101 (BatchN (None, 16, 16, 64) 192 conv2d_105[0][0]
__________________________________________________________________________________________________
batch_normalization_104 (BatchN (None, 16, 16, 96) 288 conv2d_108[0][0]
__________________________________________________________________________________________________
batch_normalization_105 (BatchN (None, 16, 16, 32) 96 conv2d_109[0][0]
__________________________________________________________________________________________________
activation_99 (Activation) (None, 16, 16, 64) 0 batch_normalization_99[0][0]
__________________________________________________________________________________________________
activation_101 (Activation) (None, 16, 16, 64) 0 batch_normalization_101[0][0]
__________________________________________________________________________________________________
activation_104 (Activation) (None, 16, 16, 96) 0 batch_normalization_104[0][0]
__________________________________________________________________________________________________
activation_105 (Activation) (None, 16, 16, 32) 0 batch_normalization_105[0][0]
__________________________________________________________________________________________________
mixed0 (Concatenate) (None, 16, 16, 256) 0 activation_99[0][0]
activation_101[0][0]
activation_104[0][0]
activation_105[0][0]
__________________________________________________________________________________________________
conv2d_113 (Conv2D) (None, 16, 16, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
batch_normalization_109 (BatchN (None, 16, 16, 64) 192 conv2d_113[0][0]
__________________________________________________________________________________________________
activation_109 (Activation) (None, 16, 16, 64) 0 batch_normalization_109[0][0]
__________________________________________________________________________________________________
conv2d_111 (Conv2D) (None, 16, 16, 48) 12288 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_114 (Conv2D) (None, 16, 16, 96) 55296 activation_109[0][0]
__________________________________________________________________________________________________
batch_normalization_107 (BatchN (None, 16, 16, 48) 144 conv2d_111[0][0]
__________________________________________________________________________________________________
batch_normalization_110 (BatchN (None, 16, 16, 96) 288 conv2d_114[0][0]
__________________________________________________________________________________________________
activation_107 (Activation) (None, 16, 16, 48) 0 batch_normalization_107[0][0]
__________________________________________________________________________________________________
activation_110 (Activation) (None, 16, 16, 96) 0 batch_normalization_110[0][0]
__________________________________________________________________________________________________
average_pooling2d_10 (AveragePo (None, 16, 16, 256) 0 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_110 (Conv2D) (None, 16, 16, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_112 (Conv2D) (None, 16, 16, 64) 76800 activation_107[0][0]
__________________________________________________________________________________________________
conv2d_115 (Conv2D) (None, 16, 16, 96) 82944 activation_110[0][0]
__________________________________________________________________________________________________
conv2d_116 (Conv2D) (None, 16, 16, 64) 16384 average_pooling2d_10[0][0]
__________________________________________________________________________________________________
batch_normalization_106 (BatchN (None, 16, 16, 64) 192 conv2d_110[0][0]
__________________________________________________________________________________________________
batch_normalization_108 (BatchN (None, 16, 16, 64) 192 conv2d_112[0][0]
__________________________________________________________________________________________________
batch_normalization_111 (BatchN (None, 16, 16, 96) 288 conv2d_115[0][0]
__________________________________________________________________________________________________
batch_normalization_112 (BatchN (None, 16, 16, 64) 192 conv2d_116[0][0]
__________________________________________________________________________________________________
activation_106 (Activation) (None, 16, 16, 64) 0 batch_normalization_106[0][0]
__________________________________________________________________________________________________
activation_108 (Activation) (None, 16, 16, 64) 0 batch_normalization_108[0][0]
__________________________________________________________________________________________________
activation_111 (Activation) (None, 16, 16, 96) 0 batch_normalization_111[0][0]
__________________________________________________________________________________________________
activation_112 (Activation) (None, 16, 16, 64) 0 batch_normalization_112[0][0]
__________________________________________________________________________________________________
mixed1 (Concatenate) (None, 16, 16, 288) 0 activation_106[0][0]
activation_108[0][0]
activation_111[0][0]
activation_112[0][0]
__________________________________________________________________________________________________
conv2d_120 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
batch_normalization_116 (BatchN (None, 16, 16, 64) 192 conv2d_120[0][0]
__________________________________________________________________________________________________
activation_116 (Activation) (None, 16, 16, 64) 0 batch_normalization_116[0][0]
__________________________________________________________________________________________________
conv2d_118 (Conv2D) (None, 16, 16, 48) 13824 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_121 (Conv2D) (None, 16, 16, 96) 55296 activation_116[0][0]
__________________________________________________________________________________________________
batch_normalization_114 (BatchN (None, 16, 16, 48) 144 conv2d_118[0][0]
__________________________________________________________________________________________________
batch_normalization_117 (BatchN (None, 16, 16, 96) 288 conv2d_121[0][0]
__________________________________________________________________________________________________
activation_114 (Activation) (None, 16, 16, 48) 0 batch_normalization_114[0][0]
__________________________________________________________________________________________________
activation_117 (Activation) (None, 16, 16, 96) 0 batch_normalization_117[0][0]
__________________________________________________________________________________________________
average_pooling2d_11 (AveragePo (None, 16, 16, 288) 0 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_117 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_119 (Conv2D) (None, 16, 16, 64) 76800 activation_114[0][0]
__________________________________________________________________________________________________
conv2d_122 (Conv2D) (None, 16, 16, 96) 82944 activation_117[0][0]
__________________________________________________________________________________________________
conv2d_123 (Conv2D) (None, 16, 16, 64) 18432 average_pooling2d_11[0][0]
__________________________________________________________________________________________________
batch_normalization_113 (BatchN (None, 16, 16, 64) 192 conv2d_117[0][0]
__________________________________________________________________________________________________
batch_normalization_115 (BatchN (None, 16, 16, 64) 192 conv2d_119[0][0]
__________________________________________________________________________________________________
batch_normalization_118 (BatchN (None, 16, 16, 96) 288 conv2d_122[0][0]
__________________________________________________________________________________________________
batch_normalization_119 (BatchN (None, 16, 16, 64) 192 conv2d_123[0][0]
__________________________________________________________________________________________________
activation_113 (Activation) (None, 16, 16, 64) 0 batch_normalization_113[0][0]
__________________________________________________________________________________________________
activation_115 (Activation) (None, 16, 16, 64) 0 batch_normalization_115[0][0]
__________________________________________________________________________________________________
activation_118 (Activation) (None, 16, 16, 96) 0 batch_normalization_118[0][0]
__________________________________________________________________________________________________
activation_119 (Activation) (None, 16, 16, 64) 0 batch_normalization_119[0][0]
__________________________________________________________________________________________________
mixed2 (Concatenate) (None, 16, 16, 288) 0 activation_113[0][0]
activation_115[0][0]
activation_118[0][0]
activation_119[0][0]
__________________________________________________________________________________________________
conv2d_125 (Conv2D) (None, 16, 16, 64) 18432 mixed2[0][0]
__________________________________________________________________________________________________
batch_normalization_121 (BatchN (None, 16, 16, 64) 192 conv2d_125[0][0]
__________________________________________________________________________________________________
activation_121 (Activation) (None, 16, 16, 64) 0 batch_normalization_121[0][0]
__________________________________________________________________________________________________
conv2d_126 (Conv2D) (None, 16, 16, 96) 55296 activation_121[0][0]
__________________________________________________________________________________________________
batch_normalization_122 (BatchN (None, 16, 16, 96) 288 conv2d_126[0][0]
__________________________________________________________________________________________________
activation_122 (Activation) (None, 16, 16, 96) 0 batch_normalization_122[0][0]
__________________________________________________________________________________________________
conv2d_124 (Conv2D) (None, 7, 7, 384) 995328 mixed2[0][0]
__________________________________________________________________________________________________
conv2d_127 (Conv2D) (None, 7, 7, 96) 82944 activation_122[0][0]
__________________________________________________________________________________________________
batch_normalization_120 (BatchN (None, 7, 7, 384) 1152 conv2d_124[0][0]
__________________________________________________________________________________________________
batch_normalization_123 (BatchN (None, 7, 7, 96) 288 conv2d_127[0][0]
__________________________________________________________________________________________________
activation_120 (Activation) (None, 7, 7, 384) 0 batch_normalization_120[0][0]
__________________________________________________________________________________________________
activation_123 (Activation) (None, 7, 7, 96) 0 batch_normalization_123[0][0]
__________________________________________________________________________________________________
max_pooling2d_10 (MaxPooling2D) (None, 7, 7, 288) 0 mixed2[0][0]
__________________________________________________________________________________________________
mixed3 (Concatenate) (None, 7, 7, 768) 0 activation_120[0][0]
activation_123[0][0]
max_pooling2d_10[0][0]
__________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
batch_normalization_128 (BatchN (None, 7, 7, 128) 384 conv2d_132[0][0]
__________________________________________________________________________________________________
activation_128 (Activation) (None, 7, 7, 128) 0 batch_normalization_128[0][0]
__________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 7, 7, 128) 114688 activation_128[0][0]
__________________________________________________________________________________________________
batch_normalization_129 (BatchN (None, 7, 7, 128) 384 conv2d_133[0][0]
__________________________________________________________________________________________________
activation_129 (Activation) (None, 7, 7, 128) 0 batch_normalization_129[0][0]
__________________________________________________________________________________________________
conv2d_129 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 7, 7, 128) 114688 activation_129[0][0]
__________________________________________________________________________________________________
batch_normalization_125 (BatchN (None, 7, 7, 128) 384 conv2d_129[0][0]
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 7, 7, 128) 384 conv2d_134[0][0]
__________________________________________________________________________________________________
activation_125 (Activation) (None, 7, 7, 128) 0 batch_normalization_125[0][0]
__________________________________________________________________________________________________
activation_130 (Activation) (None, 7, 7, 128) 0 batch_normalization_130[0][0]
__________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 7, 7, 128) 114688 activation_125[0][0]
__________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 7, 7, 128) 114688 activation_130[0][0]
__________________________________________________________________________________________________
batch_normalization_126 (BatchN (None, 7, 7, 128) 384 conv2d_130[0][0]
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 7, 7, 128) 384 conv2d_135[0][0]
__________________________________________________________________________________________________
activation_126 (Activation) (None, 7, 7, 128) 0 batch_normalization_126[0][0]
__________________________________________________________________________________________________
activation_131 (Activation) (None, 7, 7, 128) 0 batch_normalization_131[0][0]
__________________________________________________________________________________________________
average_pooling2d_12 (AveragePo (None, 7, 7, 768) 0 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_128 (Conv2D) (None, 7, 7, 192) 147456 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 7, 7, 192) 172032 activation_126[0][0]
__________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 7, 7, 192) 172032 activation_131[0][0]
__________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_12[0][0]
__________________________________________________________________________________________________
batch_normalization_124 (BatchN (None, 7, 7, 192) 576 conv2d_128[0][0]
__________________________________________________________________________________________________
batch_normalization_127 (BatchN (None, 7, 7, 192) 576 conv2d_131[0][0]
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 7, 7, 192) 576 conv2d_136[0][0]
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 7, 7, 192) 576 conv2d_137[0][0]
__________________________________________________________________________________________________
activation_124 (Activation) (None, 7, 7, 192) 0 batch_normalization_124[0][0]
__________________________________________________________________________________________________
activation_127 (Activation) (None, 7, 7, 192) 0 batch_normalization_127[0][0]
__________________________________________________________________________________________________
activation_132 (Activation) (None, 7, 7, 192) 0 batch_normalization_132[0][0]
__________________________________________________________________________________________________
activation_133 (Activation) (None, 7, 7, 192) 0 batch_normalization_133[0][0]
__________________________________________________________________________________________________
mixed4 (Concatenate) (None, 7, 7, 768) 0 activation_124[0][0]
activation_127[0][0]
activation_132[0][0]
activation_133[0][0]
__________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 7, 7, 160) 480 conv2d_142[0][0]
__________________________________________________________________________________________________
activation_138 (Activation) (None, 7, 7, 160) 0 batch_normalization_138[0][0]
__________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 7, 7, 160) 179200 activation_138[0][0]
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 7, 7, 160) 480 conv2d_143[0][0]
__________________________________________________________________________________________________
activation_139 (Activation) (None, 7, 7, 160) 0 batch_normalization_139[0][0]
__________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 7, 7, 160) 179200 activation_139[0][0]
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 7, 7, 160) 480 conv2d_139[0][0]
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 7, 7, 160) 480 conv2d_144[0][0]
__________________________________________________________________________________________________
activation_135 (Activation) (None, 7, 7, 160) 0 batch_normalization_135[0][0]
__________________________________________________________________________________________________
activation_140 (Activation) (None, 7, 7, 160) 0 batch_normalization_140[0][0]
__________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 7, 7, 160) 179200 activation_135[0][0]
__________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 7, 7, 160) 179200 activation_140[0][0]
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 7, 7, 160) 480 conv2d_140[0][0]
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 7, 7, 160) 480 conv2d_145[0][0]
__________________________________________________________________________________________________
activation_136 (Activation) (None, 7, 7, 160) 0 batch_normalization_136[0][0]
__________________________________________________________________________________________________
activation_141 (Activation) (None, 7, 7, 160) 0 batch_normalization_141[0][0]
__________________________________________________________________________________________________
average_pooling2d_13 (AveragePo (None, 7, 7, 768) 0 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 7, 7, 192) 147456 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 7, 7, 192) 215040 activation_136[0][0]
__________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 7, 7, 192) 215040 activation_141[0][0]
__________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_13[0][0]
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 7, 7, 192) 576 conv2d_138[0][0]
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 7, 7, 192) 576 conv2d_141[0][0]
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 7, 7, 192) 576 conv2d_146[0][0]
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 7, 7, 192) 576 conv2d_147[0][0]
__________________________________________________________________________________________________
activation_134 (Activation) (None, 7, 7, 192) 0 batch_normalization_134[0][0]
__________________________________________________________________________________________________
activation_137 (Activation) (None, 7, 7, 192) 0 batch_normalization_137[0][0]
__________________________________________________________________________________________________
activation_142 (Activation) (None, 7, 7, 192) 0 batch_normalization_142[0][0]
__________________________________________________________________________________________________
activation_143 (Activation) (None, 7, 7, 192) 0 batch_normalization_143[0][0]
__________________________________________________________________________________________________
mixed5 (Concatenate) (None, 7, 7, 768) 0 activation_134[0][0]
activation_137[0][0]
activation_142[0][0]
activation_143[0][0]
__________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 7, 7, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 7, 7, 160) 480 conv2d_152[0][0]
__________________________________________________________________________________________________
activation_148 (Activation) (None, 7, 7, 160) 0 batch_normalization_148[0][0]
__________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 7, 7, 160) 179200 activation_148[0][0]
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 7, 7, 160) 480 conv2d_153[0][0]
__________________________________________________________________________________________________
activation_149 (Activation) (None, 7, 7, 160) 0 batch_normalization_149[0][0]
__________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 7, 7, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 7, 7, 160) 179200 activation_149[0][0]
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 7, 7, 160) 480 conv2d_149[0][0]
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 7, 7, 160) 480 conv2d_154[0][0]
__________________________________________________________________________________________________
activation_145 (Activation) (None, 7, 7, 160) 0 batch_normalization_145[0][0]
__________________________________________________________________________________________________
activation_150 (Activation) (None, 7, 7, 160) 0 batch_normalization_150[0][0]
__________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 7, 7, 160) 179200 activation_145[0][0]
__________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 7, 7, 160) 179200 activation_150[0][0]
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 7, 7, 160) 480 conv2d_150[0][0]
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 7, 7, 160) 480 conv2d_155[0][0]
__________________________________________________________________________________________________
activation_146 (Activation) (None, 7, 7, 160) 0 batch_normalization_146[0][0]
__________________________________________________________________________________________________
activation_151 (Activation) (None, 7, 7, 160) 0 batch_normalization_151[0][0]
__________________________________________________________________________________________________
average_pooling2d_14 (AveragePo (None, 7, 7, 768) 0 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 7, 7, 192) 147456 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 7, 7, 192) 215040 activation_146[0][0]
__________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 7, 7, 192) 215040 activation_151[0][0]
__________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_14[0][0]
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 7, 7, 192) 576 conv2d_148[0][0]
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 7, 7, 192) 576 conv2d_151[0][0]
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 7, 7, 192) 576 conv2d_156[0][0]
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 7, 7, 192) 576 conv2d_157[0][0]
__________________________________________________________________________________________________
activation_144 (Activation) (None, 7, 7, 192) 0 batch_normalization_144[0][0]
__________________________________________________________________________________________________
activation_147 (Activation) (None, 7, 7, 192) 0 batch_normalization_147[0][0]
__________________________________________________________________________________________________
activation_152 (Activation) (None, 7, 7, 192) 0 batch_normalization_152[0][0]
__________________________________________________________________________________________________
activation_153 (Activation) (None, 7, 7, 192) 0 batch_normalization_153[0][0]
__________________________________________________________________________________________________
mixed6 (Concatenate) (None, 7, 7, 768) 0 activation_144[0][0]
activation_147[0][0]
activation_152[0][0]
activation_153[0][0]
__________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 7, 7, 192) 576 conv2d_162[0][0]
__________________________________________________________________________________________________
activation_158 (Activation) (None, 7, 7, 192) 0 batch_normalization_158[0][0]
__________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 7, 7, 192) 258048 activation_158[0][0]
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 7, 7, 192) 576 conv2d_163[0][0]
__________________________________________________________________________________________________
activation_159 (Activation) (None, 7, 7, 192) 0 batch_normalization_159[0][0]
__________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 7, 7, 192) 258048 activation_159[0][0]
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 7, 7, 192) 576 conv2d_159[0][0]
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 7, 7, 192) 576 conv2d_164[0][0]
__________________________________________________________________________________________________
activation_155 (Activation) (None, 7, 7, 192) 0 batch_normalization_155[0][0]
__________________________________________________________________________________________________
activation_160 (Activation) (None, 7, 7, 192) 0 batch_normalization_160[0][0]
__________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 7, 7, 192) 258048 activation_155[0][0]
__________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 7, 7, 192) 258048 activation_160[0][0]
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 7, 7, 192) 576 conv2d_160[0][0]
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 7, 7, 192) 576 conv2d_165[0][0]
__________________________________________________________________________________________________
activation_156 (Activation) (None, 7, 7, 192) 0 batch_normalization_156[0][0]
__________________________________________________________________________________________________
activation_161 (Activation) (None, 7, 7, 192) 0 batch_normalization_161[0][0]
__________________________________________________________________________________________________
average_pooling2d_15 (AveragePo (None, 7, 7, 768) 0 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 7, 7, 192) 258048 activation_156[0][0]
__________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 7, 7, 192) 258048 activation_161[0][0]
__________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_15[0][0]
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 7, 7, 192) 576 conv2d_158[0][0]
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 7, 7, 192) 576 conv2d_161[0][0]
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 7, 7, 192) 576 conv2d_166[0][0]
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 7, 7, 192) 576 conv2d_167[0][0]
__________________________________________________________________________________________________
activation_154 (Activation) (None, 7, 7, 192) 0 batch_normalization_154[0][0]
__________________________________________________________________________________________________
activation_157 (Activation) (None, 7, 7, 192) 0 batch_normalization_157[0][0]
__________________________________________________________________________________________________
activation_162 (Activation) (None, 7, 7, 192) 0 batch_normalization_162[0][0]
__________________________________________________________________________________________________
activation_163 (Activation) (None, 7, 7, 192) 0 batch_normalization_163[0][0]
__________________________________________________________________________________________________
mixed7 (Concatenate) (None, 7, 7, 768) 0 activation_154[0][0]
activation_157[0][0]
activation_162[0][0]
activation_163[0][0]
__________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 7, 7, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 7, 7, 192) 576 conv2d_170[0][0]
__________________________________________________________________________________________________
activation_166 (Activation) (None, 7, 7, 192) 0 batch_normalization_166[0][0]
__________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 7, 7, 192) 258048 activation_166[0][0]
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 7, 7, 192) 576 conv2d_171[0][0]
__________________________________________________________________________________________________
activation_167 (Activation) (None, 7, 7, 192) 0 batch_normalization_167[0][0]
__________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 7, 7, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 7, 7, 192) 258048 activation_167[0][0]
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 7, 7, 192) 576 conv2d_168[0][0]
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 7, 7, 192) 576 conv2d_172[0][0]
__________________________________________________________________________________________________
activation_164 (Activation) (None, 7, 7, 192) 0 batch_normalization_164[0][0]
__________________________________________________________________________________________________
activation_168 (Activation) (None, 7, 7, 192) 0 batch_normalization_168[0][0]
__________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 3, 3, 320) 552960 activation_164[0][0]
__________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 3, 3, 192) 331776 activation_168[0][0]
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 3, 3, 320) 960 conv2d_169[0][0]
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 3, 3, 192) 576 conv2d_173[0][0]
__________________________________________________________________________________________________
activation_165 (Activation) (None, 3, 3, 320) 0 batch_normalization_165[0][0]
__________________________________________________________________________________________________
activation_169 (Activation) (None, 3, 3, 192) 0 batch_normalization_169[0][0]
__________________________________________________________________________________________________
max_pooling2d_11 (MaxPooling2D) (None, 3, 3, 768) 0 mixed7[0][0]
__________________________________________________________________________________________________
mixed8 (Concatenate) (None, 3, 3, 1280) 0 activation_165[0][0]
activation_169[0][0]
max_pooling2d_11[0][0]
__________________________________________________________________________________________________
conv2d_178 (Conv2D) (None, 3, 3, 448) 573440 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_174 (BatchN (None, 3, 3, 448) 1344 conv2d_178[0][0]
__________________________________________________________________________________________________
activation_174 (Activation) (None, 3, 3, 448) 0 batch_normalization_174[0][0]
__________________________________________________________________________________________________
conv2d_175 (Conv2D) (None, 3, 3, 384) 491520 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_179 (Conv2D) (None, 3, 3, 384) 1548288 activation_174[0][0]
__________________________________________________________________________________________________
batch_normalization_171 (BatchN (None, 3, 3, 384) 1152 conv2d_175[0][0]
__________________________________________________________________________________________________
batch_normalization_175 (BatchN (None, 3, 3, 384) 1152 conv2d_179[0][0]
__________________________________________________________________________________________________
activation_171 (Activation) (None, 3, 3, 384) 0 batch_normalization_171[0][0]
__________________________________________________________________________________________________
activation_175 (Activation) (None, 3, 3, 384) 0 batch_normalization_175[0][0]
__________________________________________________________________________________________________
conv2d_176 (Conv2D) (None, 3, 3, 384) 442368 activation_171[0][0]
__________________________________________________________________________________________________
conv2d_177 (Conv2D) (None, 3, 3, 384) 442368 activation_171[0][0]
__________________________________________________________________________________________________
conv2d_180 (Conv2D) (None, 3, 3, 384) 442368 activation_175[0][0]
__________________________________________________________________________________________________
conv2d_181 (Conv2D) (None, 3, 3, 384) 442368 activation_175[0][0]
__________________________________________________________________________________________________
average_pooling2d_16 (AveragePo (None, 3, 3, 1280) 0 mixed8[0][0]
__________________________________________________________________________________________________
conv2d_174 (Conv2D) (None, 3, 3, 320) 409600 mixed8[0][0]
__________________________________________________________________________________________________
batch_normalization_172 (BatchN (None, 3, 3, 384) 1152 conv2d_176[0][0]
__________________________________________________________________________________________________
batch_normalization_173 (BatchN (None, 3, 3, 384) 1152 conv2d_177[0][0]
__________________________________________________________________________________________________
batch_normalization_176 (BatchN (None, 3, 3, 384) 1152 conv2d_180[0][0]
__________________________________________________________________________________________________
batch_normalization_177 (BatchN (None, 3, 3, 384) 1152 conv2d_181[0][0]
__________________________________________________________________________________________________
conv2d_182 (Conv2D) (None, 3, 3, 192) 245760 average_pooling2d_16[0][0]
__________________________________________________________________________________________________
batch_normalization_170 (BatchN (None, 3, 3, 320) 960 conv2d_174[0][0]
__________________________________________________________________________________________________
activation_172 (Activation) (None, 3, 3, 384) 0 batch_normalization_172[0][0]
__________________________________________________________________________________________________
activation_173 (Activation) (None, 3, 3, 384) 0 batch_normalization_173[0][0]
__________________________________________________________________________________________________
activation_176 (Activation) (None, 3, 3, 384) 0 batch_normalization_176[0][0]
__________________________________________________________________________________________________
activation_177 (Activation) (None, 3, 3, 384) 0 batch_normalization_177[0][0]
__________________________________________________________________________________________________
batch_normalization_178 (BatchN (None, 3, 3, 192) 576 conv2d_182[0][0]
__________________________________________________________________________________________________
activation_170 (Activation) (None, 3, 3, 320) 0 batch_normalization_170[0][0]
__________________________________________________________________________________________________
mixed9_0 (Concatenate) (None, 3, 3, 768) 0 activation_172[0][0]
activation_173[0][0]
__________________________________________________________________________________________________
concatenate_2 (Concatenate) (None, 3, 3, 768) 0 activation_176[0][0]
activation_177[0][0]
__________________________________________________________________________________________________
activation_178 (Activation) (None, 3, 3, 192) 0 batch_normalization_178[0][0]
__________________________________________________________________________________________________
mixed9 (Concatenate) (None, 3, 3, 2048) 0 activation_170[0][0]
mixed9_0[0][0]
concatenate_2[0][0]
activation_178[0][0]
__________________________________________________________________________________________________
conv2d_187 (Conv2D) (None, 3, 3, 448) 917504 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_183 (BatchN (None, 3, 3, 448) 1344 conv2d_187[0][0]
__________________________________________________________________________________________________
activation_183 (Activation) (None, 3, 3, 448) 0 batch_normalization_183[0][0]
__________________________________________________________________________________________________
conv2d_184 (Conv2D) (None, 3, 3, 384) 786432 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_188 (Conv2D) (None, 3, 3, 384) 1548288 activation_183[0][0]
__________________________________________________________________________________________________
batch_normalization_180 (BatchN (None, 3, 3, 384) 1152 conv2d_184[0][0]
__________________________________________________________________________________________________
batch_normalization_184 (BatchN (None, 3, 3, 384) 1152 conv2d_188[0][0]
__________________________________________________________________________________________________
activation_180 (Activation) (None, 3, 3, 384) 0 batch_normalization_180[0][0]
__________________________________________________________________________________________________
activation_184 (Activation) (None, 3, 3, 384) 0 batch_normalization_184[0][0]
__________________________________________________________________________________________________
conv2d_185 (Conv2D) (None, 3, 3, 384) 442368 activation_180[0][0]
__________________________________________________________________________________________________
conv2d_186 (Conv2D) (None, 3, 3, 384) 442368 activation_180[0][0]
__________________________________________________________________________________________________
conv2d_189 (Conv2D) (None, 3, 3, 384) 442368 activation_184[0][0]
__________________________________________________________________________________________________
conv2d_190 (Conv2D) (None, 3, 3, 384) 442368 activation_184[0][0]
__________________________________________________________________________________________________
average_pooling2d_17 (AveragePo (None, 3, 3, 2048) 0 mixed9[0][0]
__________________________________________________________________________________________________
conv2d_183 (Conv2D) (None, 3, 3, 320) 655360 mixed9[0][0]
__________________________________________________________________________________________________
batch_normalization_181 (BatchN (None, 3, 3, 384) 1152 conv2d_185[0][0]
__________________________________________________________________________________________________
batch_normalization_182 (BatchN (None, 3, 3, 384) 1152 conv2d_186[0][0]
__________________________________________________________________________________________________
batch_normalization_185 (BatchN (None, 3, 3, 384) 1152 conv2d_189[0][0]
__________________________________________________________________________________________________
batch_normalization_186 (BatchN (None, 3, 3, 384) 1152 conv2d_190[0][0]
__________________________________________________________________________________________________
conv2d_191 (Conv2D) (None, 3, 3, 192) 393216 average_pooling2d_17[0][0]
__________________________________________________________________________________________________
batch_normalization_179 (BatchN (None, 3, 3, 320) 960 conv2d_183[0][0]
__________________________________________________________________________________________________
activation_181 (Activation) (None, 3, 3, 384) 0 batch_normalization_181[0][0]
__________________________________________________________________________________________________
activation_182 (Activation) (None, 3, 3, 384) 0 batch_normalization_182[0][0]
__________________________________________________________________________________________________
activation_185 (Activation) (None, 3, 3, 384) 0 batch_normalization_185[0][0]
__________________________________________________________________________________________________
activation_186 (Activation) (None, 3, 3, 384) 0 batch_normalization_186[0][0]
__________________________________________________________________________________________________
batch_normalization_187 (BatchN (None, 3, 3, 192) 576 conv2d_191[0][0]
__________________________________________________________________________________________________
activation_179 (Activation) (None, 3, 3, 320) 0 batch_normalization_179[0][0]
__________________________________________________________________________________________________
mixed9_1 (Concatenate) (None, 3, 3, 768) 0 activation_181[0][0]
activation_182[0][0]
__________________________________________________________________________________________________
concatenate_3 (Concatenate) (None, 3, 3, 768) 0 activation_185[0][0]
activation_186[0][0]
__________________________________________________________________________________________________
activation_187 (Activation) (None, 3, 3, 192) 0 batch_normalization_187[0][0]
__________________________________________________________________________________________________
mixed10 (Concatenate) (None, 3, 3, 2048) 0 activation_179[0][0]
mixed9_1[0][0]
concatenate_3[0][0]
activation_187[0][0]
==================================================================================================
Total params: 21,802,784
Trainable params: 21,768,352
Non-trainable params: 34,432
__________________________________________________________________________________________________
We select one layer: for example, the mixed8 layer.
# Select the Inception layer whose output will feed the new classification head
last_layer = pre_trained_model.get_layer('mixed8')
# Print the output shape of the selected layer
print('last layer output shape :', last_layer.output_shape)
# Retrieve the output of the layer: a 4D tensor (batch, height, width, channels)
last_output = last_layer.output
last layer output shape : (None, 3, 3, 1280)
We will use the Dropout layer to regularize the model.
# Flatten the (None, 3, 3, 1280) feature maps of the last layer to 1 dimension
x = tf.keras.layers.Flatten()(last_output)
# Add a fully connected layer with 512 hidden units and ReLU activation
x = tf.keras.layers.Dense(units = 512, activation = 'relu')(x)
# Add a dropout rate of 0.5 to regularize the dense head
x = tf.keras.layers.Dropout(rate = 0.5)(x)
# Add a final sigmoid layer for binary (cat vs dog) classification
outputs = tf.keras.layers.Dense(units = 1, activation = 'sigmoid')(x)
# Build the model: frozen pre-trained Inception base + new classification head
model = Model(inputs = pre_trained_model.input, outputs = outputs)
# Compile the model.
# NOTE: `learning_rate` replaces the deprecated `lr` keyword of RMSprop.
model.compile(optimizer=RMSprop(learning_rate=1e-4), loss='binary_crossentropy', metrics = ['accuracy'])
# Print the architecture summary
model.summary()
Model: "functional_11"
__________________________________________________________________________________________________
Layer (type) Output Shape Param # Connected to
==================================================================================================
input_3 (InputLayer) [(None, 150, 150, 3) 0
__________________________________________________________________________________________________
conv2d_98 (Conv2D) (None, 74, 74, 32) 864 input_3[0][0]
__________________________________________________________________________________________________
batch_normalization_94 (BatchNo (None, 74, 74, 32) 96 conv2d_98[0][0]
__________________________________________________________________________________________________
activation_94 (Activation) (None, 74, 74, 32) 0 batch_normalization_94[0][0]
__________________________________________________________________________________________________
conv2d_99 (Conv2D) (None, 72, 72, 32) 9216 activation_94[0][0]
__________________________________________________________________________________________________
batch_normalization_95 (BatchNo (None, 72, 72, 32) 96 conv2d_99[0][0]
__________________________________________________________________________________________________
activation_95 (Activation) (None, 72, 72, 32) 0 batch_normalization_95[0][0]
__________________________________________________________________________________________________
conv2d_100 (Conv2D) (None, 72, 72, 64) 18432 activation_95[0][0]
__________________________________________________________________________________________________
batch_normalization_96 (BatchNo (None, 72, 72, 64) 192 conv2d_100[0][0]
__________________________________________________________________________________________________
activation_96 (Activation) (None, 72, 72, 64) 0 batch_normalization_96[0][0]
__________________________________________________________________________________________________
max_pooling2d_8 (MaxPooling2D) (None, 35, 35, 64) 0 activation_96[0][0]
__________________________________________________________________________________________________
conv2d_101 (Conv2D) (None, 35, 35, 80) 5120 max_pooling2d_8[0][0]
__________________________________________________________________________________________________
batch_normalization_97 (BatchNo (None, 35, 35, 80) 240 conv2d_101[0][0]
__________________________________________________________________________________________________
activation_97 (Activation) (None, 35, 35, 80) 0 batch_normalization_97[0][0]
__________________________________________________________________________________________________
conv2d_102 (Conv2D) (None, 33, 33, 192) 138240 activation_97[0][0]
__________________________________________________________________________________________________
batch_normalization_98 (BatchNo (None, 33, 33, 192) 576 conv2d_102[0][0]
__________________________________________________________________________________________________
activation_98 (Activation) (None, 33, 33, 192) 0 batch_normalization_98[0][0]
__________________________________________________________________________________________________
max_pooling2d_9 (MaxPooling2D) (None, 16, 16, 192) 0 activation_98[0][0]
__________________________________________________________________________________________________
conv2d_106 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
batch_normalization_102 (BatchN (None, 16, 16, 64) 192 conv2d_106[0][0]
__________________________________________________________________________________________________
activation_102 (Activation) (None, 16, 16, 64) 0 batch_normalization_102[0][0]
__________________________________________________________________________________________________
conv2d_104 (Conv2D) (None, 16, 16, 48) 9216 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
conv2d_107 (Conv2D) (None, 16, 16, 96) 55296 activation_102[0][0]
__________________________________________________________________________________________________
batch_normalization_100 (BatchN (None, 16, 16, 48) 144 conv2d_104[0][0]
__________________________________________________________________________________________________
batch_normalization_103 (BatchN (None, 16, 16, 96) 288 conv2d_107[0][0]
__________________________________________________________________________________________________
activation_100 (Activation) (None, 16, 16, 48) 0 batch_normalization_100[0][0]
__________________________________________________________________________________________________
activation_103 (Activation) (None, 16, 16, 96) 0 batch_normalization_103[0][0]
__________________________________________________________________________________________________
average_pooling2d_9 (AveragePoo (None, 16, 16, 192) 0 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
conv2d_103 (Conv2D) (None, 16, 16, 64) 12288 max_pooling2d_9[0][0]
__________________________________________________________________________________________________
conv2d_105 (Conv2D) (None, 16, 16, 64) 76800 activation_100[0][0]
__________________________________________________________________________________________________
conv2d_108 (Conv2D) (None, 16, 16, 96) 82944 activation_103[0][0]
__________________________________________________________________________________________________
conv2d_109 (Conv2D) (None, 16, 16, 32) 6144 average_pooling2d_9[0][0]
__________________________________________________________________________________________________
batch_normalization_99 (BatchNo (None, 16, 16, 64) 192 conv2d_103[0][0]
__________________________________________________________________________________________________
batch_normalization_101 (BatchN (None, 16, 16, 64) 192 conv2d_105[0][0]
__________________________________________________________________________________________________
batch_normalization_104 (BatchN (None, 16, 16, 96) 288 conv2d_108[0][0]
__________________________________________________________________________________________________
batch_normalization_105 (BatchN (None, 16, 16, 32) 96 conv2d_109[0][0]
__________________________________________________________________________________________________
activation_99 (Activation) (None, 16, 16, 64) 0 batch_normalization_99[0][0]
__________________________________________________________________________________________________
activation_101 (Activation) (None, 16, 16, 64) 0 batch_normalization_101[0][0]
__________________________________________________________________________________________________
activation_104 (Activation) (None, 16, 16, 96) 0 batch_normalization_104[0][0]
__________________________________________________________________________________________________
activation_105 (Activation) (None, 16, 16, 32) 0 batch_normalization_105[0][0]
__________________________________________________________________________________________________
mixed0 (Concatenate) (None, 16, 16, 256) 0 activation_99[0][0]
activation_101[0][0]
activation_104[0][0]
activation_105[0][0]
__________________________________________________________________________________________________
conv2d_113 (Conv2D) (None, 16, 16, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
batch_normalization_109 (BatchN (None, 16, 16, 64) 192 conv2d_113[0][0]
__________________________________________________________________________________________________
activation_109 (Activation) (None, 16, 16, 64) 0 batch_normalization_109[0][0]
__________________________________________________________________________________________________
conv2d_111 (Conv2D) (None, 16, 16, 48) 12288 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_114 (Conv2D) (None, 16, 16, 96) 55296 activation_109[0][0]
__________________________________________________________________________________________________
batch_normalization_107 (BatchN (None, 16, 16, 48) 144 conv2d_111[0][0]
__________________________________________________________________________________________________
batch_normalization_110 (BatchN (None, 16, 16, 96) 288 conv2d_114[0][0]
__________________________________________________________________________________________________
activation_107 (Activation) (None, 16, 16, 48) 0 batch_normalization_107[0][0]
__________________________________________________________________________________________________
activation_110 (Activation) (None, 16, 16, 96) 0 batch_normalization_110[0][0]
__________________________________________________________________________________________________
average_pooling2d_10 (AveragePo (None, 16, 16, 256) 0 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_110 (Conv2D) (None, 16, 16, 64) 16384 mixed0[0][0]
__________________________________________________________________________________________________
conv2d_112 (Conv2D) (None, 16, 16, 64) 76800 activation_107[0][0]
__________________________________________________________________________________________________
conv2d_115 (Conv2D) (None, 16, 16, 96) 82944 activation_110[0][0]
__________________________________________________________________________________________________
conv2d_116 (Conv2D) (None, 16, 16, 64) 16384 average_pooling2d_10[0][0]
__________________________________________________________________________________________________
batch_normalization_106 (BatchN (None, 16, 16, 64) 192 conv2d_110[0][0]
__________________________________________________________________________________________________
batch_normalization_108 (BatchN (None, 16, 16, 64) 192 conv2d_112[0][0]
__________________________________________________________________________________________________
batch_normalization_111 (BatchN (None, 16, 16, 96) 288 conv2d_115[0][0]
__________________________________________________________________________________________________
batch_normalization_112 (BatchN (None, 16, 16, 64) 192 conv2d_116[0][0]
__________________________________________________________________________________________________
activation_106 (Activation) (None, 16, 16, 64) 0 batch_normalization_106[0][0]
__________________________________________________________________________________________________
activation_108 (Activation) (None, 16, 16, 64) 0 batch_normalization_108[0][0]
__________________________________________________________________________________________________
activation_111 (Activation) (None, 16, 16, 96) 0 batch_normalization_111[0][0]
__________________________________________________________________________________________________
activation_112 (Activation) (None, 16, 16, 64) 0 batch_normalization_112[0][0]
__________________________________________________________________________________________________
mixed1 (Concatenate) (None, 16, 16, 288) 0 activation_106[0][0]
activation_108[0][0]
activation_111[0][0]
activation_112[0][0]
__________________________________________________________________________________________________
conv2d_120 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
batch_normalization_116 (BatchN (None, 16, 16, 64) 192 conv2d_120[0][0]
__________________________________________________________________________________________________
activation_116 (Activation) (None, 16, 16, 64) 0 batch_normalization_116[0][0]
__________________________________________________________________________________________________
conv2d_118 (Conv2D) (None, 16, 16, 48) 13824 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_121 (Conv2D) (None, 16, 16, 96) 55296 activation_116[0][0]
__________________________________________________________________________________________________
batch_normalization_114 (BatchN (None, 16, 16, 48) 144 conv2d_118[0][0]
__________________________________________________________________________________________________
batch_normalization_117 (BatchN (None, 16, 16, 96) 288 conv2d_121[0][0]
__________________________________________________________________________________________________
activation_114 (Activation) (None, 16, 16, 48) 0 batch_normalization_114[0][0]
__________________________________________________________________________________________________
activation_117 (Activation) (None, 16, 16, 96) 0 batch_normalization_117[0][0]
__________________________________________________________________________________________________
average_pooling2d_11 (AveragePo (None, 16, 16, 288) 0 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_117 (Conv2D) (None, 16, 16, 64) 18432 mixed1[0][0]
__________________________________________________________________________________________________
conv2d_119 (Conv2D) (None, 16, 16, 64) 76800 activation_114[0][0]
__________________________________________________________________________________________________
conv2d_122 (Conv2D) (None, 16, 16, 96) 82944 activation_117[0][0]
__________________________________________________________________________________________________
conv2d_123 (Conv2D) (None, 16, 16, 64) 18432 average_pooling2d_11[0][0]
__________________________________________________________________________________________________
batch_normalization_113 (BatchN (None, 16, 16, 64) 192 conv2d_117[0][0]
__________________________________________________________________________________________________
batch_normalization_115 (BatchN (None, 16, 16, 64) 192 conv2d_119[0][0]
__________________________________________________________________________________________________
batch_normalization_118 (BatchN (None, 16, 16, 96) 288 conv2d_122[0][0]
__________________________________________________________________________________________________
batch_normalization_119 (BatchN (None, 16, 16, 64) 192 conv2d_123[0][0]
__________________________________________________________________________________________________
activation_113 (Activation) (None, 16, 16, 64) 0 batch_normalization_113[0][0]
__________________________________________________________________________________________________
activation_115 (Activation) (None, 16, 16, 64) 0 batch_normalization_115[0][0]
__________________________________________________________________________________________________
activation_118 (Activation) (None, 16, 16, 96) 0 batch_normalization_118[0][0]
__________________________________________________________________________________________________
activation_119 (Activation) (None, 16, 16, 64) 0 batch_normalization_119[0][0]
__________________________________________________________________________________________________
mixed2 (Concatenate) (None, 16, 16, 288) 0 activation_113[0][0]
activation_115[0][0]
activation_118[0][0]
activation_119[0][0]
__________________________________________________________________________________________________
conv2d_125 (Conv2D) (None, 16, 16, 64) 18432 mixed2[0][0]
__________________________________________________________________________________________________
batch_normalization_121 (BatchN (None, 16, 16, 64) 192 conv2d_125[0][0]
__________________________________________________________________________________________________
activation_121 (Activation) (None, 16, 16, 64) 0 batch_normalization_121[0][0]
__________________________________________________________________________________________________
conv2d_126 (Conv2D) (None, 16, 16, 96) 55296 activation_121[0][0]
__________________________________________________________________________________________________
batch_normalization_122 (BatchN (None, 16, 16, 96) 288 conv2d_126[0][0]
__________________________________________________________________________________________________
activation_122 (Activation) (None, 16, 16, 96) 0 batch_normalization_122[0][0]
__________________________________________________________________________________________________
conv2d_124 (Conv2D) (None, 7, 7, 384) 995328 mixed2[0][0]
__________________________________________________________________________________________________
conv2d_127 (Conv2D) (None, 7, 7, 96) 82944 activation_122[0][0]
__________________________________________________________________________________________________
batch_normalization_120 (BatchN (None, 7, 7, 384) 1152 conv2d_124[0][0]
__________________________________________________________________________________________________
batch_normalization_123 (BatchN (None, 7, 7, 96) 288 conv2d_127[0][0]
__________________________________________________________________________________________________
activation_120 (Activation) (None, 7, 7, 384) 0 batch_normalization_120[0][0]
__________________________________________________________________________________________________
activation_123 (Activation) (None, 7, 7, 96) 0 batch_normalization_123[0][0]
__________________________________________________________________________________________________
max_pooling2d_10 (MaxPooling2D) (None, 7, 7, 288) 0 mixed2[0][0]
__________________________________________________________________________________________________
mixed3 (Concatenate) (None, 7, 7, 768) 0 activation_120[0][0]
activation_123[0][0]
max_pooling2d_10[0][0]
__________________________________________________________________________________________________
conv2d_132 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
batch_normalization_128 (BatchN (None, 7, 7, 128) 384 conv2d_132[0][0]
__________________________________________________________________________________________________
activation_128 (Activation) (None, 7, 7, 128) 0 batch_normalization_128[0][0]
__________________________________________________________________________________________________
conv2d_133 (Conv2D) (None, 7, 7, 128) 114688 activation_128[0][0]
__________________________________________________________________________________________________
batch_normalization_129 (BatchN (None, 7, 7, 128) 384 conv2d_133[0][0]
__________________________________________________________________________________________________
activation_129 (Activation) (None, 7, 7, 128) 0 batch_normalization_129[0][0]
__________________________________________________________________________________________________
conv2d_129 (Conv2D) (None, 7, 7, 128) 98304 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_134 (Conv2D) (None, 7, 7, 128) 114688 activation_129[0][0]
__________________________________________________________________________________________________
batch_normalization_125 (BatchN (None, 7, 7, 128) 384 conv2d_129[0][0]
__________________________________________________________________________________________________
batch_normalization_130 (BatchN (None, 7, 7, 128) 384 conv2d_134[0][0]
__________________________________________________________________________________________________
activation_125 (Activation) (None, 7, 7, 128) 0 batch_normalization_125[0][0]
__________________________________________________________________________________________________
activation_130 (Activation) (None, 7, 7, 128) 0 batch_normalization_130[0][0]
__________________________________________________________________________________________________
conv2d_130 (Conv2D) (None, 7, 7, 128) 114688 activation_125[0][0]
__________________________________________________________________________________________________
conv2d_135 (Conv2D) (None, 7, 7, 128) 114688 activation_130[0][0]
__________________________________________________________________________________________________
batch_normalization_126 (BatchN (None, 7, 7, 128) 384 conv2d_130[0][0]
__________________________________________________________________________________________________
batch_normalization_131 (BatchN (None, 7, 7, 128) 384 conv2d_135[0][0]
__________________________________________________________________________________________________
activation_126 (Activation) (None, 7, 7, 128) 0 batch_normalization_126[0][0]
__________________________________________________________________________________________________
activation_131 (Activation) (None, 7, 7, 128) 0 batch_normalization_131[0][0]
__________________________________________________________________________________________________
average_pooling2d_12 (AveragePo (None, 7, 7, 768) 0 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_128 (Conv2D) (None, 7, 7, 192) 147456 mixed3[0][0]
__________________________________________________________________________________________________
conv2d_131 (Conv2D) (None, 7, 7, 192) 172032 activation_126[0][0]
__________________________________________________________________________________________________
conv2d_136 (Conv2D) (None, 7, 7, 192) 172032 activation_131[0][0]
__________________________________________________________________________________________________
conv2d_137 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_12[0][0]
__________________________________________________________________________________________________
batch_normalization_124 (BatchN (None, 7, 7, 192) 576 conv2d_128[0][0]
__________________________________________________________________________________________________
batch_normalization_127 (BatchN (None, 7, 7, 192) 576 conv2d_131[0][0]
__________________________________________________________________________________________________
batch_normalization_132 (BatchN (None, 7, 7, 192) 576 conv2d_136[0][0]
__________________________________________________________________________________________________
batch_normalization_133 (BatchN (None, 7, 7, 192) 576 conv2d_137[0][0]
__________________________________________________________________________________________________
activation_124 (Activation) (None, 7, 7, 192) 0 batch_normalization_124[0][0]
__________________________________________________________________________________________________
activation_127 (Activation) (None, 7, 7, 192) 0 batch_normalization_127[0][0]
__________________________________________________________________________________________________
activation_132 (Activation) (None, 7, 7, 192) 0 batch_normalization_132[0][0]
__________________________________________________________________________________________________
activation_133 (Activation) (None, 7, 7, 192) 0 batch_normalization_133[0][0]
__________________________________________________________________________________________________
mixed4 (Concatenate) (None, 7, 7, 768) 0 activation_124[0][0]
activation_127[0][0]
activation_132[0][0]
activation_133[0][0]
__________________________________________________________________________________________________
conv2d_142 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
batch_normalization_138 (BatchN (None, 7, 7, 160) 480 conv2d_142[0][0]
__________________________________________________________________________________________________
activation_138 (Activation) (None, 7, 7, 160) 0 batch_normalization_138[0][0]
__________________________________________________________________________________________________
conv2d_143 (Conv2D) (None, 7, 7, 160) 179200 activation_138[0][0]
__________________________________________________________________________________________________
batch_normalization_139 (BatchN (None, 7, 7, 160) 480 conv2d_143[0][0]
__________________________________________________________________________________________________
activation_139 (Activation) (None, 7, 7, 160) 0 batch_normalization_139[0][0]
__________________________________________________________________________________________________
conv2d_139 (Conv2D) (None, 7, 7, 160) 122880 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_144 (Conv2D) (None, 7, 7, 160) 179200 activation_139[0][0]
__________________________________________________________________________________________________
batch_normalization_135 (BatchN (None, 7, 7, 160) 480 conv2d_139[0][0]
__________________________________________________________________________________________________
batch_normalization_140 (BatchN (None, 7, 7, 160) 480 conv2d_144[0][0]
__________________________________________________________________________________________________
activation_135 (Activation) (None, 7, 7, 160) 0 batch_normalization_135[0][0]
__________________________________________________________________________________________________
activation_140 (Activation) (None, 7, 7, 160) 0 batch_normalization_140[0][0]
__________________________________________________________________________________________________
conv2d_140 (Conv2D) (None, 7, 7, 160) 179200 activation_135[0][0]
__________________________________________________________________________________________________
conv2d_145 (Conv2D) (None, 7, 7, 160) 179200 activation_140[0][0]
__________________________________________________________________________________________________
batch_normalization_136 (BatchN (None, 7, 7, 160) 480 conv2d_140[0][0]
__________________________________________________________________________________________________
batch_normalization_141 (BatchN (None, 7, 7, 160) 480 conv2d_145[0][0]
__________________________________________________________________________________________________
activation_136 (Activation) (None, 7, 7, 160) 0 batch_normalization_136[0][0]
__________________________________________________________________________________________________
activation_141 (Activation) (None, 7, 7, 160) 0 batch_normalization_141[0][0]
__________________________________________________________________________________________________
average_pooling2d_13 (AveragePo (None, 7, 7, 768) 0 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_138 (Conv2D) (None, 7, 7, 192) 147456 mixed4[0][0]
__________________________________________________________________________________________________
conv2d_141 (Conv2D) (None, 7, 7, 192) 215040 activation_136[0][0]
__________________________________________________________________________________________________
conv2d_146 (Conv2D) (None, 7, 7, 192) 215040 activation_141[0][0]
__________________________________________________________________________________________________
conv2d_147 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_13[0][0]
__________________________________________________________________________________________________
batch_normalization_134 (BatchN (None, 7, 7, 192) 576 conv2d_138[0][0]
__________________________________________________________________________________________________
batch_normalization_137 (BatchN (None, 7, 7, 192) 576 conv2d_141[0][0]
__________________________________________________________________________________________________
batch_normalization_142 (BatchN (None, 7, 7, 192) 576 conv2d_146[0][0]
__________________________________________________________________________________________________
batch_normalization_143 (BatchN (None, 7, 7, 192) 576 conv2d_147[0][0]
__________________________________________________________________________________________________
activation_134 (Activation) (None, 7, 7, 192) 0 batch_normalization_134[0][0]
__________________________________________________________________________________________________
activation_137 (Activation) (None, 7, 7, 192) 0 batch_normalization_137[0][0]
__________________________________________________________________________________________________
activation_142 (Activation) (None, 7, 7, 192) 0 batch_normalization_142[0][0]
__________________________________________________________________________________________________
activation_143 (Activation) (None, 7, 7, 192) 0 batch_normalization_143[0][0]
__________________________________________________________________________________________________
mixed5 (Concatenate) (None, 7, 7, 768) 0 activation_134[0][0]
activation_137[0][0]
activation_142[0][0]
activation_143[0][0]
__________________________________________________________________________________________________
conv2d_152 (Conv2D) (None, 7, 7, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
batch_normalization_148 (BatchN (None, 7, 7, 160) 480 conv2d_152[0][0]
__________________________________________________________________________________________________
activation_148 (Activation) (None, 7, 7, 160) 0 batch_normalization_148[0][0]
__________________________________________________________________________________________________
conv2d_153 (Conv2D) (None, 7, 7, 160) 179200 activation_148[0][0]
__________________________________________________________________________________________________
batch_normalization_149 (BatchN (None, 7, 7, 160) 480 conv2d_153[0][0]
__________________________________________________________________________________________________
activation_149 (Activation) (None, 7, 7, 160) 0 batch_normalization_149[0][0]
__________________________________________________________________________________________________
conv2d_149 (Conv2D) (None, 7, 7, 160) 122880 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_154 (Conv2D) (None, 7, 7, 160) 179200 activation_149[0][0]
__________________________________________________________________________________________________
batch_normalization_145 (BatchN (None, 7, 7, 160) 480 conv2d_149[0][0]
__________________________________________________________________________________________________
batch_normalization_150 (BatchN (None, 7, 7, 160) 480 conv2d_154[0][0]
__________________________________________________________________________________________________
activation_145 (Activation) (None, 7, 7, 160) 0 batch_normalization_145[0][0]
__________________________________________________________________________________________________
activation_150 (Activation) (None, 7, 7, 160) 0 batch_normalization_150[0][0]
__________________________________________________________________________________________________
conv2d_150 (Conv2D) (None, 7, 7, 160) 179200 activation_145[0][0]
__________________________________________________________________________________________________
conv2d_155 (Conv2D) (None, 7, 7, 160) 179200 activation_150[0][0]
__________________________________________________________________________________________________
batch_normalization_146 (BatchN (None, 7, 7, 160) 480 conv2d_150[0][0]
__________________________________________________________________________________________________
batch_normalization_151 (BatchN (None, 7, 7, 160) 480 conv2d_155[0][0]
__________________________________________________________________________________________________
activation_146 (Activation) (None, 7, 7, 160) 0 batch_normalization_146[0][0]
__________________________________________________________________________________________________
activation_151 (Activation) (None, 7, 7, 160) 0 batch_normalization_151[0][0]
__________________________________________________________________________________________________
average_pooling2d_14 (AveragePo (None, 7, 7, 768) 0 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_148 (Conv2D) (None, 7, 7, 192) 147456 mixed5[0][0]
__________________________________________________________________________________________________
conv2d_151 (Conv2D) (None, 7, 7, 192) 215040 activation_146[0][0]
__________________________________________________________________________________________________
conv2d_156 (Conv2D) (None, 7, 7, 192) 215040 activation_151[0][0]
__________________________________________________________________________________________________
conv2d_157 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_14[0][0]
__________________________________________________________________________________________________
batch_normalization_144 (BatchN (None, 7, 7, 192) 576 conv2d_148[0][0]
__________________________________________________________________________________________________
batch_normalization_147 (BatchN (None, 7, 7, 192) 576 conv2d_151[0][0]
__________________________________________________________________________________________________
batch_normalization_152 (BatchN (None, 7, 7, 192) 576 conv2d_156[0][0]
__________________________________________________________________________________________________
batch_normalization_153 (BatchN (None, 7, 7, 192) 576 conv2d_157[0][0]
__________________________________________________________________________________________________
activation_144 (Activation) (None, 7, 7, 192) 0 batch_normalization_144[0][0]
__________________________________________________________________________________________________
activation_147 (Activation) (None, 7, 7, 192) 0 batch_normalization_147[0][0]
__________________________________________________________________________________________________
activation_152 (Activation) (None, 7, 7, 192) 0 batch_normalization_152[0][0]
__________________________________________________________________________________________________
activation_153 (Activation) (None, 7, 7, 192) 0 batch_normalization_153[0][0]
__________________________________________________________________________________________________
mixed6 (Concatenate) (None, 7, 7, 768) 0 activation_144[0][0]
activation_147[0][0]
activation_152[0][0]
activation_153[0][0]
__________________________________________________________________________________________________
conv2d_162 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
batch_normalization_158 (BatchN (None, 7, 7, 192) 576 conv2d_162[0][0]
__________________________________________________________________________________________________
activation_158 (Activation) (None, 7, 7, 192) 0 batch_normalization_158[0][0]
__________________________________________________________________________________________________
conv2d_163 (Conv2D) (None, 7, 7, 192) 258048 activation_158[0][0]
__________________________________________________________________________________________________
batch_normalization_159 (BatchN (None, 7, 7, 192) 576 conv2d_163[0][0]
__________________________________________________________________________________________________
activation_159 (Activation) (None, 7, 7, 192) 0 batch_normalization_159[0][0]
__________________________________________________________________________________________________
conv2d_159 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_164 (Conv2D) (None, 7, 7, 192) 258048 activation_159[0][0]
__________________________________________________________________________________________________
batch_normalization_155 (BatchN (None, 7, 7, 192) 576 conv2d_159[0][0]
__________________________________________________________________________________________________
batch_normalization_160 (BatchN (None, 7, 7, 192) 576 conv2d_164[0][0]
__________________________________________________________________________________________________
activation_155 (Activation) (None, 7, 7, 192) 0 batch_normalization_155[0][0]
__________________________________________________________________________________________________
activation_160 (Activation) (None, 7, 7, 192) 0 batch_normalization_160[0][0]
__________________________________________________________________________________________________
conv2d_160 (Conv2D) (None, 7, 7, 192) 258048 activation_155[0][0]
__________________________________________________________________________________________________
conv2d_165 (Conv2D) (None, 7, 7, 192) 258048 activation_160[0][0]
__________________________________________________________________________________________________
batch_normalization_156 (BatchN (None, 7, 7, 192) 576 conv2d_160[0][0]
__________________________________________________________________________________________________
batch_normalization_161 (BatchN (None, 7, 7, 192) 576 conv2d_165[0][0]
__________________________________________________________________________________________________
activation_156 (Activation) (None, 7, 7, 192) 0 batch_normalization_156[0][0]
__________________________________________________________________________________________________
activation_161 (Activation) (None, 7, 7, 192) 0 batch_normalization_161[0][0]
__________________________________________________________________________________________________
average_pooling2d_15 (AveragePo (None, 7, 7, 768) 0 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_158 (Conv2D) (None, 7, 7, 192) 147456 mixed6[0][0]
__________________________________________________________________________________________________
conv2d_161 (Conv2D) (None, 7, 7, 192) 258048 activation_156[0][0]
__________________________________________________________________________________________________
conv2d_166 (Conv2D) (None, 7, 7, 192) 258048 activation_161[0][0]
__________________________________________________________________________________________________
conv2d_167 (Conv2D) (None, 7, 7, 192) 147456 average_pooling2d_15[0][0]
__________________________________________________________________________________________________
batch_normalization_154 (BatchN (None, 7, 7, 192) 576 conv2d_158[0][0]
__________________________________________________________________________________________________
batch_normalization_157 (BatchN (None, 7, 7, 192) 576 conv2d_161[0][0]
__________________________________________________________________________________________________
batch_normalization_162 (BatchN (None, 7, 7, 192) 576 conv2d_166[0][0]
__________________________________________________________________________________________________
batch_normalization_163 (BatchN (None, 7, 7, 192) 576 conv2d_167[0][0]
__________________________________________________________________________________________________
activation_154 (Activation) (None, 7, 7, 192) 0 batch_normalization_154[0][0]
__________________________________________________________________________________________________
activation_157 (Activation) (None, 7, 7, 192) 0 batch_normalization_157[0][0]
__________________________________________________________________________________________________
activation_162 (Activation) (None, 7, 7, 192) 0 batch_normalization_162[0][0]
__________________________________________________________________________________________________
activation_163 (Activation) (None, 7, 7, 192) 0 batch_normalization_163[0][0]
__________________________________________________________________________________________________
mixed7 (Concatenate) (None, 7, 7, 768) 0 activation_154[0][0]
activation_157[0][0]
activation_162[0][0]
activation_163[0][0]
__________________________________________________________________________________________________
conv2d_170 (Conv2D) (None, 7, 7, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
batch_normalization_166 (BatchN (None, 7, 7, 192) 576 conv2d_170[0][0]
__________________________________________________________________________________________________
activation_166 (Activation) (None, 7, 7, 192) 0 batch_normalization_166[0][0]
__________________________________________________________________________________________________
conv2d_171 (Conv2D) (None, 7, 7, 192) 258048 activation_166[0][0]
__________________________________________________________________________________________________
batch_normalization_167 (BatchN (None, 7, 7, 192) 576 conv2d_171[0][0]
__________________________________________________________________________________________________
activation_167 (Activation) (None, 7, 7, 192) 0 batch_normalization_167[0][0]
__________________________________________________________________________________________________
conv2d_168 (Conv2D) (None, 7, 7, 192) 147456 mixed7[0][0]
__________________________________________________________________________________________________
conv2d_172 (Conv2D) (None, 7, 7, 192) 258048 activation_167[0][0]
__________________________________________________________________________________________________
batch_normalization_164 (BatchN (None, 7, 7, 192) 576 conv2d_168[0][0]
__________________________________________________________________________________________________
batch_normalization_168 (BatchN (None, 7, 7, 192) 576 conv2d_172[0][0]
__________________________________________________________________________________________________
activation_164 (Activation) (None, 7, 7, 192) 0 batch_normalization_164[0][0]
__________________________________________________________________________________________________
activation_168 (Activation) (None, 7, 7, 192) 0 batch_normalization_168[0][0]
__________________________________________________________________________________________________
conv2d_169 (Conv2D) (None, 3, 3, 320) 552960 activation_164[0][0]
__________________________________________________________________________________________________
conv2d_173 (Conv2D) (None, 3, 3, 192) 331776 activation_168[0][0]
__________________________________________________________________________________________________
batch_normalization_165 (BatchN (None, 3, 3, 320) 960 conv2d_169[0][0]
__________________________________________________________________________________________________
batch_normalization_169 (BatchN (None, 3, 3, 192) 576 conv2d_173[0][0]
__________________________________________________________________________________________________
activation_165 (Activation) (None, 3, 3, 320) 0 batch_normalization_165[0][0]
__________________________________________________________________________________________________
activation_169 (Activation) (None, 3, 3, 192) 0 batch_normalization_169[0][0]
__________________________________________________________________________________________________
max_pooling2d_11 (MaxPooling2D) (None, 3, 3, 768) 0 mixed7[0][0]
__________________________________________________________________________________________________
mixed8 (Concatenate) (None, 3, 3, 1280) 0 activation_165[0][0]
activation_169[0][0]
max_pooling2d_11[0][0]
__________________________________________________________________________________________________
flatten_6 (Flatten) (None, 11520) 0 mixed8[0][0]
__________________________________________________________________________________________________
dense_12 (Dense) (None, 512) 5898752 flatten_6[0][0]
__________________________________________________________________________________________________
dropout_5 (Dropout) (None, 512) 0 dense_12[0][0]
__________________________________________________________________________________________________
dense_13 (Dense) (None, 1) 513 dropout_5[0][0]
==================================================================================================
Total params: 16,574,113
Trainable params: 16,552,737
Non-trainable params: 21,376
__________________________________________________________________________________________________
from tensorflow.keras.preprocessing.image import ImageDataGenerator
# Build run-time data generators for the training and test sets.
# The training generator adds geometric augmentation plus normalization, and a
# validation_split that holds out ~300 of the 2700 training images (0.1112 * 2700).
train_datagen = ImageDataGenerator(
rescale = 1 / 255.0,
rotation_range = 45,
width_shift_range = 0.3,
height_shift_range = 0.3,
shear_range = 0.3,
zoom_range = 0.3,
horizontal_flip = True,
fill_mode = 'nearest',
validation_split = 0.1112)
# For the test datagenerator we only normalize; evaluation data is never augmented.
test_datagen = ImageDataGenerator(rescale=1 / 255.0)
# Flow training images in batches of 20 using train_datagen generator
training_generator = train_datagen.flow_from_directory(
TRAINING_DIR, # This is the source directory for training images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
subset = 'training',
class_mode = 'binary') # Since we use binary_crossentropy loss, we need binary labels
# Flow validation images (the held-out split of the training set) in batches of 20
validation_generator = train_datagen.flow_from_directory(
TRAINING_DIR, # Same directory; subset='validation' selects the held-out split
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
subset = 'validation',
class_mode ='binary') # Since we use binary_crossentropy loss, we need binary labels
# Flow test images in batches of 20 using test_datagen generator
testing_generator = test_datagen.flow_from_directory(
TESTING_DIR, # This is the source directory for testing images
target_size = (150, 150), # All images will be resized to 150x150
batch_size = 20,
class_mode ='binary') # Since we use binary_crossentropy loss, we need binary labels
Found 2400 images belonging to 2 classes. Found 300 images belonging to 2 classes. Found 300 images belonging to 2 classes.
# Stop training early if validation loss has not decreased for 3 consecutive epochs.
earlystopping = EarlyStopping(monitor='val_loss', mode='min', verbose=1, patience=3)

# Save the best model with the lowest validation loss (disabled for now).
# checkpointer = ModelCheckpoint(filepath="weights.hdf5", verbose=1, save_best_only=True)

# Using Callback to control training
class myCallback(tf.keras.callbacks.Callback):
    """Stop training as soon as validation accuracy exceeds 96%."""

    def on_epoch_end(self, epoch, log=None):
        # Fixes two defects in the original:
        #  - `log={}` was a mutable default argument (shared across calls);
        #  - `log.get('val_accuracy') > 0.96` raises TypeError when the metric
        #    is missing (get() returns None, and Keras may pass logs=None).
        log = log or {}
        val_acc = log.get('val_accuracy')
        if val_acc is not None and val_acc > 0.96:
            print("\nReached {}% val_accuracy so cancelling training!".format(round(100 * val_acc, 2)))
            self.model.stop_training = True

# callback instance
callbacks = myCallback()
# Train the transfer-learning model on the augmented generators.
# steps_per_epoch / validation_steps divide by 20 because both generators
# were created with batch_size = 20, so each epoch sees every image once.
history3 = model.fit(
training_generator,
steps_per_epoch = training_generator.n // 20,
epochs = 10,
validation_data = validation_generator,
validation_steps = validation_generator.n // 20,
callbacks = [callbacks, earlystopping], # 96% val_accuracy stop + val_loss early stopping
verbose = 2
)
Epoch 1/10 120/120 - 413s - loss: 0.4503 - accuracy: 0.7925 - val_loss: 0.3338 - val_accuracy: 0.8867 Epoch 2/10 120/120 - 413s - loss: 0.2744 - accuracy: 0.8858 - val_loss: 0.2382 - val_accuracy: 0.9067 Epoch 3/10 120/120 - 415s - loss: 0.2383 - accuracy: 0.9038 - val_loss: 0.7355 - val_accuracy: 0.8967 Epoch 4/10 120/120 - 413s - loss: 0.2259 - accuracy: 0.9146 - val_loss: 0.5365 - val_accuracy: 0.9067 Epoch 5/10 120/120 - 412s - loss: 0.1887 - accuracy: 0.9296 - val_loss: 0.3835 - val_accuracy: 0.9367 Epoch 00005: early stopping
#load the best checkpointed weights, if the ModelCheckpoint callback was enabled
#model.load_weights("weights_.hdf5")
# Plot training/validation accuracy and loss curves for this run
graph_plot(history3)
# Evaluate the performance of the model on the unseen test set (300 images = 15 batches of 20)
evaluate = model.evaluate(testing_generator, steps = testing_generator.n // 20, verbose = 2)
# evaluate is [loss, accuracy]; index 1 is the test accuracy
print('Accuracy Test : {}'.format(evaluate[1]))
15/15 - 8s - loss: 0.1496 - accuracy: 0.9700 Accuracy Test : 0.9700000286102295
# Build the full list of test image paths and their ground-truth class labels.
test = []
label = []
# os.listdir on the testing root returns the class sub-folders ('cats', 'dogs');
# listing each sub-folder then yields that class's image file names.
for i in os.listdir('./testing'):
    # Use the same './testing' root everywhere (the original mixed 'testing'
    # and './testing', which worked but was inconsistent).
    test_class = os.listdir(os.path.join('./testing', i))
    for j in test_class:
        img = os.path.join('./testing', i, j)
        test.append(img)
        label.append(i)
# Fixed message: these are *test* images, not training images.
print('Number of test images : {} \n'.format(len(test)))
Number of train images : 300
# Create a DataFrame pairing each test image path with its true class label
test_df = pd.DataFrame({'Image': test, 'Labels': label})
# Display the table (notebook cell output)
test_df
| Image | Labels | |
|---|---|---|
| 0 | ./testing\cats\cat.101.jpg | cats |
| 1 | ./testing\cats\cat.112.jpg | cats |
| 2 | ./testing\cats\cat.115.jpg | cats |
| 3 | ./testing\cats\cat.116.jpg | cats |
| 4 | ./testing\cats\cat.124.jpg | cats |
| ... | ... | ... |
| 295 | ./testing\dogs\dog.958.jpg | dogs |
| 296 | ./testing\dogs\dog.96.jpg | dogs |
| 297 | ./testing\dogs\dog.963.jpg | dogs |
| 298 | ./testing\dogs\dog.97.jpg | dogs |
| 299 | ./testing\dogs\dog.997.jpg | dogs |
300 rows × 2 columns
# NOTE(review): this import shadows the builtin open() for the rest of the
# module; kept as-is so any later code relying on it is unaffected.
from PIL.Image import open
from sklearn.metrics import confusion_matrix, classification_report, accuracy_score

# Run the trained model on every test image individually and record its prediction.
prediction = []   # predicted class names ('cats' / 'dogs')
original = []     # ground-truth class names
image = []        # resized PIL images, kept for the visualization step below
count = 0         # NOTE(review): never used; kept for compatibility
label = {0: 'cats', 1: 'dogs'}
a = []            # raw sigmoid probabilities
b = []            # predicted class indices (0 / 1)
# Hoist the column-to-list conversions out of the loop: the original rebuilt
# both lists on every iteration, making the loop accidentally O(n^2).
image_paths = test_df['Image'].tolist()
true_labels = test_df['Labels'].tolist()
for item in range(len(test_df)):
    # Open the image and force 3 channels: grayscale or RGBA files would
    # otherwise make the reshape(-1, 150, 150, 3) below fail.
    img = open(image_paths[item]).convert('RGB')
    # resizing the image to the (150, 150) input size the model was trained on
    img = img.resize((150, 150))
    # appending image to the image list
    image.append(img)
    # converting image to a normalized float array in [0, 1]
    img = np.asarray(img, dtype=np.float32) / 255.0
    # reshaping the image into a 4D (batch, H, W, C) array
    img = img.reshape(-1, 150, 150, 3)
    # making prediction of the model
    predict = model.predict(img)
    a.append(predict[0][0])
    # Sigmoid output: < 0.5 -> class 0 (cats), otherwise class 1 (dogs).
    # (With a softmax head you would use np.argmax(predict) instead.)
    index_predict = 0 if predict[0][0] < 0.5 else 1
    b.append(index_predict)
    # appending the predicted class to the list
    prediction.append(label[index_predict])
    # appending original class to the list
    original.append(true_labels[item])
results = pd.DataFrame({'Original': original, 'Prediction': prediction, 'Probability': a, 'Class': b})
results
| Original | Prediction | Probability | Class | |
|---|---|---|---|---|
| 0 | cats | cats | 1.968018e-18 | 0 |
| 1 | cats | cats | 2.564942e-09 | 0 |
| 2 | cats | cats | 3.059364e-28 | 0 |
| 3 | cats | cats | 6.273518e-07 | 0 |
| 4 | cats | cats | 4.219905e-13 | 0 |
| ... | ... | ... | ... | ... |
| 295 | dogs | dogs | 1.000000e+00 | 1 |
| 296 | dogs | dogs | 1.000000e+00 | 1 |
| 297 | dogs | dogs | 9.662517e-01 | 1 |
| 298 | dogs | dogs | 1.000000e+00 | 1 |
| 299 | dogs | dogs | 1.000000e+00 | 1 |
300 rows × 4 columns
# Getting the overall test accuracy by comparing true vs predicted class names
score = accuracy_score(original, prediction)
print("Test Accuracy : {}".format(score))
Test Accuracy : 0.9766666666666667
# Visualizing the results: a 10x4 grid of randomly chosen test images, labelled
# with the true and predicted classes (green when correct, red when wrong).
import random

plt.figure(figsize=(50, 50))
for i in range(40):
    # random.randrange(len(image)) is exclusive of len(image). The original
    # used random.randint(0, len(image)), whose upper bound is *inclusive*,
    # so it could pick len(image) itself and raise IndexError.
    j = random.randrange(len(image))
    plt.subplot(10, 4, i + 1)
    col = 'green' if original[j] == prediction[j] else 'red'
    plt.xlabel("Original: " + original[j] + " Prediction: " + prediction[j], fontsize = 30, color = col)
    plt.xticks([])
    plt.yticks([])
    plt.imshow(image[j])
plt.show()
# Print per-class precision, recall, f1-score and support for the 300 test images
print(classification_report(np.asarray(original), np.asarray(prediction)))
precision recall f1-score support
cats 0.97 0.98 0.98 150
dogs 0.98 0.97 0.98 150
accuracy 0.98 300
macro avg 0.98 0.98 0.98 300
weighted avg 0.98 0.98 0.98 300
import seaborn as sns

# Draw the 2x2 confusion matrix of true vs predicted classes as a heatmap.
plt.figure(figsize=(5, 5))
cm = confusion_matrix(np.asarray(original), np.asarray(prediction))
axes = plt.subplot()
sns.heatmap(cm, annot=True, ax=axes)
axes.set_xlabel('Predicted')
axes.set_ylabel('Original')
axes.set_title('Confusion_matrix')
Text(0.5, 1.0, 'Confusion_matrix')